hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
b7857d8d36f71e59b29e67f8dc6f21cdb975e7fe
| 12,099
|
py
|
Python
|
HVQE.py
|
Zuricho/kagome_HVQE
|
bbcb591ac30e81a7b60ce1d2d6c67d92fa25a899
|
[
"Apache-2.0"
] | null | null | null |
HVQE.py
|
Zuricho/kagome_HVQE
|
bbcb591ac30e81a7b60ce1d2d6c67d92fa25a899
|
[
"Apache-2.0"
] | null | null | null |
HVQE.py
|
Zuricho/kagome_HVQE
|
bbcb591ac30e81a7b60ce1d2d6c67d92fa25a899
|
[
"Apache-2.0"
] | null | null | null |
import qem
import _HVQE
from time import time
import os,sys,pickle
import chainer as ch
import numpy as np
from datetime import datetime
usage = """
Usage:
    python HVQE.py [job_name] [job_type] [parameters]
    job_name: kagome_open, kagome_periodic, triangular, test
    job_type: classical, ground_state, 1_excite_state ... [n]_excite_state
    For classical calculation parameters:
        [num of eigenstates (recommend 10)]
    For ground state calculation parameters:
        [number of parameters (recommend n*30)]
"""

# --- Command-line parsing -------------------------------------------------
# Fixed: show the usage text (and exit non-zero) instead of raising an
# IndexError when too few arguments are supplied.  Also fixed the
# "exicte_state" typo in the usage text above.
if len(sys.argv) < 4:
    print(usage)
    sys.exit(1)

job_name = sys.argv[1]  # choices: kagome_open, kagome_periodic, chain_10, triangular, test
job_type = sys.argv[2]  # choices: classical, ground_state, 1_excite_state ... [n]_excite_state
input_graph = "./input/graph_input_%s.txt" % job_name
output_path = "./output/" + job_name

if job_type == "classical":
    k = int(sys.argv[3])      # number of lowest energy eigenstates to compute
elif job_type[-5:] == "state":
    n_par = int(sys.argv[3])  # number of variational parameters for the VQE
else:
    print(usage)
    sys.exit(1)  # fixed: was exit(0) — a bad job_type is an error

# Create the output directory on first use.
if not os.path.exists(output_path):
    os.mkdir(output_path)
if job_type == "classical":
    # Exact diagonalization with ARPACK: compute the k lowest eigenstates
    # of the Heisenberg model on the complete interaction graph.
    return_state = True

    # The graph file is executable Python that defines complete_graph_input.
    # NOTE(review): exec() on a data file runs arbitrary code — only ever
    # point input_graph at trusted files.
    with open(input_graph, 'r') as file:
        exec(file.read())
    complete_graph = complete_graph_input
    del complete_graph_input

    print('Computing the %d lowest energies of %s\n' % (k, job_name))
    start = time()
    output = qem.ground_state(complete_graph, k, return_state)
    if qem.GPU == True:
        qem.sync()  # wait for GPU work so the timing below is meaningful
    end = time()

    # Write the energies to the human-readable output file.
    # Fixed: use a context manager instead of a bare open()/close() pair so
    # the handle is released even if a write fails.
    with open(output_path + "/output_classical.txt", "w") as f_out:
        if return_state == False:
            f_out.write('The %d lowest energies are %s\n' % (k, output))
        if return_state == True:
            # With return_state, output is (energies, eigenvectors).
            for i in range(len(output[0])):
                f_out.write("%s_state_%d,%.8f\n" % (job_name, i, output[0][i]))
        print('Solutions found with ARPACK for %.3f seconds\n' % (end - start))

    # Machine-readable energies, plus the ground state itself (used later
    # by the VQE jobs for infidelity computations).
    if return_state == False:
        np.savetxt(output_path + '/lowest_energies.txt', output)
    else:
        np.savetxt(output_path + '/lowest_energies.txt', output[0])
        with open(output_path + '/gs.dat', 'wb') as file:
            pickle.dump(output[1][:, 0], file, protocol=4)
elif job_type == "ground_state":
return_state=True
class Name:
pass
class Parameters:
pass
# cmd_args=_HVQE.get_command_line_input()
run_args=Name()
cmd_args=Parameters()
cmd_args.n_par = n_par # number of parameters to be used in the VQE
cmd_args.par_multiplicity = 1 # The parameter multiplicity
cmd_args.n_iter = 1 # Number of iterations of the basinhopping routine
cmd_args.cost_fn = 'energy' # or 'infidelity'
cmd_args.temperature = 1. # Temperature for the metropolis creterion in the scipy basinhopping routine
cmd_args.stepsize = 1. # max stepsize of random displacement per parameter after each local optimization in the scipy basinhopping routine
cmd_args.init_par = None # A list of initial parameters from which the basinhopping routine starts
cmd_args.dump_interval = None # Dump the state of the program to path/dump.dat every dump_interval function calls
try: # Use GPU if CuPy installation is available.
import cupy as xp
run_args.GPU=True
except ImportError:
import numpy as xp
run_args.GPU=False
# Make timestamp in UTC of start
run_args.date_start=str(datetime.utcnow())
# Load the ansatz from graph_input.txt
with open(input_graph, 'r') as file:
exec(file.read())
run_args.complete_graph=complete_graph_input
run_args.init_layer=init_layer_input
run_args.layers =layers_input
del complete_graph_input
del init_layer_input
del layers_input
# Get the number of qubits from the complete_graph.
nodes=[node for edge in run_args.complete_graph for node in edge]
nodes=set(nodes)
run_args.n=len(nodes)
del nodes
# Load the true ground state into memory for computation of infidelities.
gs_reg=qem.Reg(run_args.n)
with open(output_path+'/gs.dat','rb') as file:
gs_reg.psi.re=xp.array(pickle.load(file)).reshape((2,)*run_args.n)
# Print info about current run to stdout.
print('Started basinhopping at',run_args.date_start, 'UTC')
# Prepare the init_reg, whose state is a dimer covering of run_args.complete_graph, as specified by run_args.init_layer.
init_reg=qem.Reg(run_args.n)
for edge in run_args.init_layer:
qem.apply_prepare_singlet(edge,init_reg)
#RUN THE VQE
run_args.start=time()
global reg_psi_list
reg_psi_list = []
global reg_psi
reg_psi = None
vqe_out,reg_psi=_HVQE.run_VQE(cmd_args,run_args,init_reg,gs_reg,reg_psi_list)
reg_psi_list.append(reg_psi)
# save the reg_psi_list
with open(output_path+'/reg_psi_list_0.pkl', 'wb') as file_list:
pickle.dump(reg_psi_list,file_list,protocol=4)
if run_args.GPU==True:
qem.sync()
run_args.end=time()
# Wall-clock time of VQE (hours)
run_args.wall_clock=(run_args.end-run_args.start)/60/60
# Get the infidelity and the energy of the final state irrespective of whether we used the energy or the infidelity as the cost functon.
vqe_out.opt_parameters=ch.Variable(xp.array(vqe_out.opt_parameters))
if cmd_args.cost_fn=='energy':
run_args.E_VQE=vqe_out.cost_VQE #Already a float
run_args.inf_VQE=_HVQE.infidelity_from_parameters(init_reg,run_args.layers,run_args.n,cmd_args.par_multiplicity,vqe_out.opt_parameters,gs_reg)
run_args.inf_VQE=float(run_args.inf_VQE.array)
if cmd_args.cost_fn=='infidelity':
run_args.inf_VQE=vqe_out.cost_VQE #Already a float
run_args.E_VQE,reg_psi=_HVQE.Heisenberg_energy_from_parameters(run_args.complete_graph,init_reg,run_args.layers,run_args.n,cmd_args.par_multiplicity,vqe_out.opt_parameters)
run_args.E_VQE=float(run_args.E_VQE.array)
vqe_out.opt_parameters=vqe_out.opt_parameters.array.tolist() #Convert for printing and storing.
run_args.date_end=str(datetime.utcnow()) # End time in UTC
# Write input and results to disk. If no former output exists, print a line explaining the data in the output file.
output=str([vars(cmd_args),vars(run_args),vars(vqe_out)])
if not os.path.exists(output_path+'/output_ground.txt'):
f=open(output_path+'/output_ground.txt', 'w')
with open(output_path+'/output_ground.txt', 'a') as f:
f.write(output+'\n\n')
print('Finished basinhopping of ',output_path, 'at',run_args.date_end,'UTC, with')
f_out = open(output_path+"/output_HVQE_0_parm%d.txt"%(cmd_args.n_par),"w")
print(vars(cmd_args))
print('init_par =', vqe_out.init_par)
print(' ')
f_out.write('E_VQE,%.8f\n'%(run_args.E_VQE))
f_out.write('inf_VQE,%.8f\n'%(run_args.inf_VQE))
f_out.write('n_fn_calls,%s\n'%(vqe_out.n_fn_calls))
f_out.write('Wall-clock time(hours),%.3f\n'%(run_args.wall_clock))
f_out.close()
elif job_type[-13:] == "_excite_state":
return_state=True
class Name:
pass
class Parameters:
pass
# cmd_args=_HVQE.get_command_line_input()
run_args=Name()
cmd_args=Parameters()
cmd_args.n_par = n_par # number of parameters to be used in the VQE
cmd_args.par_multiplicity = 1 # The parameter multiplicity
cmd_args.n_iter = 1 # Number of iterations of the basinhopping routine
cmd_args.cost_fn = 'energy' # or 'infidelity'
cmd_args.temperature = 1. # Temperature for the metropolis creterion in the scipy basinhopping routine
cmd_args.stepsize = 1. # max stepsize of random displacement per parameter after each local optimization in the scipy basinhopping routine
cmd_args.init_par = None # A list of initial parameters from which the basinhopping routine starts
cmd_args.dump_interval = None # Dump the state of the program to path/dump.dat every dump_interval function calls
try: # Use GPU if CuPy installation is available.
import cupy as xp
run_args.GPU=True
except ImportError:
import numpy as xp
run_args.GPU=False
# Make timestamp in UTC of start
run_args.date_start=str(datetime.utcnow())
# Load the ansatz from graph_input.txt
with open(input_graph, 'r') as file:
exec(file.read())
run_args.complete_graph=complete_graph_input
run_args.init_layer=init_layer_input
run_args.layers =layers_input
del complete_graph_input
del init_layer_input
del layers_input
# Get the number of qubits from the complete_graph.
nodes=[node for edge in run_args.complete_graph for node in edge]
nodes=set(nodes)
run_args.n=len(nodes)
del nodes
# Load the true ground state into memory for computation of infidelities.
gs_reg=qem.Reg(run_args.n)
with open(output_path+'/gs.dat','rb') as file:
gs_reg.psi.re=xp.array(pickle.load(file)).reshape((2,)*run_args.n)
# Print info about current run to stdout.
print('Started basinhopping at',run_args.date_start, 'UTC')
# Prepare the init_reg, whose state is a dimer covering of run_args.complete_graph, as specified by run_args.init_layer.
init_reg=qem.Reg(run_args.n)
for edge in run_args.init_layer:
qem.apply_prepare_singlet(edge,init_reg)
#RUN THE VQE
run_args.start=time()
with open(output_path+'/reg_psi_list_%d.pkl'%(int(job_type[0:-13])-1), 'rb') as file_list_pre:
reg_psi_list = pickle.load(file_list_pre)
reg_psi = None
vqe_out,reg_psi=_HVQE.run_VQE(cmd_args,run_args,init_reg,gs_reg,reg_psi_list)
reg_psi_list.append(reg_psi)
# save the reg_psi_list
with open(output_path+'/reg_psi_list_%d.pkl'%(int(job_type[0:-13])), 'wb') as file_list:
pickle.dump(reg_psi_list,file_list,protocol=4)
if run_args.GPU==True:
qem.sync()
run_args.end=time()
# Wall-clock time of VQE (hours)
run_args.wall_clock=(run_args.end-run_args.start)/60/60
# Get the infidelity and the energy of the final state irrespective of whether we used the energy or the infidelity as the cost functon.
vqe_out.opt_parameters=ch.Variable(xp.array(vqe_out.opt_parameters))
if cmd_args.cost_fn=='energy':
run_args.E_VQE=vqe_out.cost_VQE #Already a float
run_args.inf_VQE=_HVQE.infidelity_from_parameters(init_reg,run_args.layers,run_args.n,cmd_args.par_multiplicity,vqe_out.opt_parameters,gs_reg)
run_args.inf_VQE=float(run_args.inf_VQE.array)
if cmd_args.cost_fn=='infidelity':
run_args.inf_VQE=vqe_out.cost_VQE #Already a float
run_args.E_VQE,reg_psi=_HVQE.Heisenberg_energy_from_parameters(run_args.complete_graph,init_reg,run_args.layers,run_args.n,cmd_args.par_multiplicity,vqe_out.opt_parameters)
run_args.E_VQE=float(run_args.E_VQE.array)
vqe_out.opt_parameters=vqe_out.opt_parameters.array.tolist() #Convert for printing and storing.
run_args.date_end=str(datetime.utcnow()) # End time in UTC
# Write input and results to disk. If no former output exists, print a line explaining the data in the output file.
output=str([vars(cmd_args),vars(run_args),vars(vqe_out)])
if not os.path.exists(output_path+'/output_ground.txt'):
f=open(output_path+'/output_ground.txt', 'w')
with open(output_path+'/output_ground.txt', 'a') as f:
f.write(output+'\n\n')
print('Finished basinhopping of ',output_path, 'at',run_args.date_end,'UTC, with')
f_out = open(output_path+"/output_HVQE_%d_parm%d.txt"%(int(job_type[0:-13]),cmd_args.n_par),"w")
print(vars(cmd_args))
print('init_par =', vqe_out.init_par)
print(' ')
f_out.write('E_VQE,%.8f\n'%(run_args.E_VQE))
f_out.write('inf_VQE,%.8f\n'%(run_args.inf_VQE))
f_out.write('n_fn_calls,%s\n'%(vqe_out.n_fn_calls))
f_out.write('Wall-clock time(hours),%.3f\n'%(run_args.wall_clock))
f_out.close()
else:
pass
| 38.903537
| 180
| 0.706009
| 1,953
| 12,099
| 4.12596
| 0.126472
| 0.072971
| 0.017374
| 0.028295
| 0.847977
| 0.83172
| 0.819558
| 0.808017
| 0.797592
| 0.797592
| 0
| 0.006308
| 0.187619
| 12,099
| 310
| 181
| 39.029032
| 0.813511
| 0.244566
| 0
| 0.747706
| 0
| 0
| 0.134376
| 0.010794
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.022936
| 0.059633
| 0
| 0.077982
| 0.059633
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b7873e0f63f935e572e3df8eb9b8ee113244061a
| 90,105
|
py
|
Python
|
BluePlug/api.py
|
liufeng3486/BluePlug
|
c7c5c769ed35c71ebc542d34848d6bf309abd051
|
[
"MIT"
] | 1
|
2019-01-27T04:08:05.000Z
|
2019-01-27T04:08:05.000Z
|
BluePlug/api.py
|
liufeng3486/BluePlug
|
c7c5c769ed35c71ebc542d34848d6bf309abd051
|
[
"MIT"
] | 5
|
2021-03-18T21:35:20.000Z
|
2022-01-13T00:58:18.000Z
|
BluePlug/api.py
|
liufeng3486/BluePlug
|
c7c5c769ed35c71ebc542d34848d6bf309abd051
|
[
"MIT"
] | null | null | null |
# 学习制作网易云音乐客户端。
# 此文件实现登陆查询等一系列功能。
__author__ = 'weiy'
"""
4.10日。
"""
import urllib.parse
import requests
import hashlib
import json
def shotlist(lst):
    """Return a sorted copy of *lst* with duplicates removed."""
    # sorted() accepts any iterable, so wrapping the set in list() was
    # redundant; behavior is unchanged.
    return sorted(set(lst))
class WebApi:
    """Web API helpers for music.163.com: login and playlist queries."""

    # Per-request timeout in seconds; used as the default for httpRequest.
    default_timeout = 10

    # Browser-like request headers sent with every call.
    # NOTE(review): this class-level dict is also used as a default argument
    # of httpRequest, so it is shared across all calls and instances — do
    # not mutate it in place.
    headers = {
        'Accept': '*/*',
        'Accept-Encoding': 'gzip,deflate,sdch',
        'Accept-Language': 'zh-CN,zh;q=0.8',
        'Proxy-Connection': 'keep-alive',
        'Content-Type': 'application/x-www-form-urlencoded',
        'Host': 'music.163.com',
        'Referer': 'http://music.163.com/',
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.101 Safari/537.36'
    }
def __init__(self):
self.cookies = {
'appver': '1.6.1.82809',
'os': 'pc'
}
def httpRequest(self, action, method="GET", add=None, data=None, headers=headers, cookies='',\
timeout=default_timeout, urlencode='utf-8'):
"""
默认以get方式请求,
GET方式附加内容用add参数,POST方式提交内容用data参数。
编码用urlencode参数,默认utf-8。
GET方式返回json形式请求的内容。
POST方式返回cookies和json形式的内容。(0,1)
默认cookies为空。
"""
if method.upper() == 'GET':
if add:
html = requests.get(action, params=add, headers=headers, cookies=cookies, timeout=timeout)
else:
html = requests.get(action, headers=headers, cookies=cookies, timeout=timeout)
html.encoding = urlencode
return json.loads(html.text)
elif method.upper() == 'POST':
if data:
html = requests.post(action, data=data, headers=headers, cookies=cookies, timeout=timeout)
else:
html = requests.post(action, headers=headers, cookies=cookies, timeout=timeout)
html.encoding = urlencode
return html.cookies, json.loads(html.text)
def login(self, username, password):
"""
以网易账号登陆,其他的登陆待写。返回cookies和json形式内容。
"""
data = {
'username': username,
'password': hashlib.md5(password.encode('utf-8')).hexdigest(),
'remeberLogin': 'true'
}
cki = self.httpRequest('http://music.163.com/api/login', method="POST", data=data)
cki[0].set('appver', self.cookies['appver'], domain='music.163.com')
cki[0].set('os', self.cookies['os'], domain='music.163.com')
return cki[0], cki[1]
def user_playlist(self, uid, offset=0):
"""
个人歌单。
"""
url = 'http://music.163.com/api/user/playlist/?offset=%s&limit=1000&uid=%s' % (offset, uid)
html = self.httpRequest(url, method='GET', cookies=self.cookies)
return html['playlist']
def all_playlist(self, cat='全部歌单', types='all', offset=0, index=1):
"""
全部歌单。列表字典形式。
"""
url = 'http://music.163.com/api/playlist/list?cat=%s&type=%s&order=%s&offset=%d&total=true&limit=30&index=%d)'\
% (urllib.parse.quote(cat), types, types, offset, index)
html = self.httpRequest(url, method='GET', cookies=self.cookies)
return html['playlists']
def details_playlist(self,id):
return '''{
"result": {
"subscribers": [],
"subscribed": false,
"creator": {
"defaultAvatar": false,
"province": 110000,
"authStatus": 1,
"followed": false,
"avatarUrl": "http://p1.music.126.net/QWMV-Ru_6149AKe0mCBXKg==/1420569024374784.jpg",
"accountStatus": 0,
"gender": 1,
"city": 110101,
"birthday": -2209017600000,
"userId": 1,
"userType": 2,
"nickname": "网易云音乐",
"signature": "欢迎使用网易云音乐,有任何问题可以联系@云音乐客服, 我们会尽快答复。有关独立音乐人和独立厂牌请站内私信@原创君。",
"description": "网易云音乐官方账号",
"detailDescription": "网易云音乐官方账号",
"avatarImgId": 1420569024374784,
"backgroundImgId": 2002210674180202,
"backgroundUrl": "http://p1.music.126.net/pmHS4fcQtcNEGewNb5HRhg==/2002210674180202.jpg",
"authority": 3,
"mutual": false,
"expertTags": null,
"experts": null,
"djStatus": 10,
"vipType": 11,
"remarkName": null,
"avatarImgIdStr": "1420569024374784",
"backgroundImgIdStr": "2002210674180202"
},
"artists": null,
"tracks": [{
"name": "火焰小溪",
"id": 1297750771,
"position": 1,
"alias": ["\"声音,你好\"公益活动主题曲"],
"status": 0,
"fee": 8,
"copyrightId": 677020,
"disc": "",
"no": 1,
"artists": [{
"name": "林宥嘉",
"id": 3685,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"album": {
"name": "火焰小溪",
"id": 72071867,
"type": "EP/Single",
"size": 2,
"picId": 109951163440625910,
"blurPicUrl": "http://p2.music.126.net/HeSyftZftDogVH1VkFqN1A==/109951163440625910.jpg",
"companyId": 0,
"pic": 109951163440625910,
"picUrl": "http://p2.music.126.net/HeSyftZftDogVH1VkFqN1A==/109951163440625910.jpg",
"publishTime": 1533398400007,
"description": "",
"tags": "",
"company": "华研",
"briefDesc": "",
"artist": {
"name": "",
"id": 0,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
},
"songs": [],
"alias": ["\"声音,你好\"公益活动主题曲"],
"status": 1,
"copyrightId": 677020,
"commentThreadId": "R_AL_3_72071867",
"artists": [{
"name": "林宥嘉",
"id": 3685,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"subType": "录音室版",
"transName": null,
"picId_str": "109951163440625910"
},
"starred": false,
"popularity": 100.0,
"score": 100,
"starredNum": 0,
"duration": 274997,
"playedNum": 0,
"dayPlays": 0,
"hearTime": 0,
"ringtone": null,
"crbt": null,
"audition": null,
"copyFrom": "",
"commentThreadId": "R_SO_4_1297750771",
"rtUrl": null,
"ftype": 0,
"rtUrls": [],
"copyright": 0,
"transName": null,
"sign": null,
"hMusic": {
"name": "",
"id": 3419924588,
"size": 11002819,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 320000,
"playTime": 274997,
"volumeDelta": -2.0
},
"mMusic": {
"name": "",
"id": 3419924589,
"size": 6601709,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 192000,
"playTime": 274997,
"volumeDelta": -2.0
},
"lMusic": {
"name": "",
"id": 3419924590,
"size": 4401154,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 128000,
"playTime": 274997,
"volumeDelta": -1.0
},
"bMusic": {
"name": "",
"id": 3419924590,
"size": 4401154,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 128000,
"playTime": 274997,
"volumeDelta": -1.0
},
"mp3Url": null,
"rtype": 0,
"rurl": null,
"mvid": 5965315
}, {
"name": "说谎",
"id": 108390,
"position": 6,
"alias": [],
"status": 0,
"fee": 8,
"copyrightId": 677020,
"disc": "1",
"no": 6,
"artists": [{
"name": "林宥嘉",
"id": 3685,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"album": {
"name": "感官/世界",
"id": 10764,
"type": "专辑",
"size": 11,
"picId": 109951163187404137,
"blurPicUrl": "http://p2.music.126.net/mMZNB-jhYsw29K61QtopJA==/109951163187404137.jpg",
"companyId": 0,
"pic": 109951163187404137,
"picUrl": "http://p2.music.126.net/mMZNB-jhYsw29K61QtopJA==/109951163187404137.jpg",
"publishTime": 1256832000000,
"description": "",
"tags": "",
"company": "华研国际",
"briefDesc": "",
"artist": {
"name": "",
"id": 0,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
},
"songs": [],
"alias": [],
"status": 40,
"copyrightId": 1004,
"commentThreadId": "R_AL_3_10764",
"artists": [{
"name": "林宥嘉",
"id": 3685,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"subType": "录音室版",
"transName": null,
"picId_str": "109951163187404137"
},
"starred": false,
"popularity": 100.0,
"score": 100,
"starredNum": 0,
"duration": 264160,
"playedNum": 0,
"dayPlays": 0,
"hearTime": 0,
"ringtone": "600902000009129439",
"crbt": null,
"audition": null,
"copyFrom": "",
"commentThreadId": "R_SO_4_108390",
"rtUrl": null,
"ftype": 0,
"rtUrls": [],
"copyright": 2,
"transName": null,
"sign": null,
"hMusic": {
"name": "",
"id": 1426514912,
"size": 10569187,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 320000,
"playTime": 264160,
"volumeDelta": 0.0
},
"mMusic": {
"name": "",
"id": 1426514913,
"size": 6341529,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 192000,
"playTime": 264160,
"volumeDelta": 0.0
},
"lMusic": {
"name": "",
"id": 1426514914,
"size": 4227701,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 128000,
"playTime": 264160,
"volumeDelta": 0.0
},
"bMusic": {
"name": "",
"id": 1426514914,
"size": 4227701,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 128000,
"playTime": 264160,
"volumeDelta": 0.0
},
"mp3Url": null,
"rtype": 0,
"rurl": null,
"mvid": 5842732
}, {
"name": "Perfect",
"id": 460043703,
"position": 5,
"alias": [],
"status": 0,
"fee": 8,
"copyrightId": 7002,
"disc": "1",
"no": 5,
"artists": [{
"name": "Ed Sheeran",
"id": 33184,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"album": {
"name": "÷ (Deluxe)",
"id": 35150843,
"type": "专辑",
"size": 16,
"picId": 18810444929762432,
"blurPicUrl": "http://p2.music.126.net/ARJwzJcDmmd0PYArKnmGCg==/18810444929762432.jpg",
"companyId": 0,
"pic": 18810444929762432,
"picUrl": "http://p2.music.126.net/ARJwzJcDmmd0PYArKnmGCg==/18810444929762432.jpg",
"publishTime": 1488470400007,
"description": "",
"tags": "",
"company": "华纳唱片",
"briefDesc": "",
"artist": {
"name": "",
"id": 0,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
},
"songs": [],
"alias": [],
"status": 3,
"copyrightId": 7002,
"commentThreadId": "R_AL_3_35150843",
"artists": [{
"name": "Ed Sheeran",
"id": 33184,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"subType": "录音室版",
"transName": null,
"picId_str": "18810444929762432"
},
"starred": false,
"popularity": 100.0,
"score": 100,
"starredNum": 0,
"duration": 263400,
"playedNum": 0,
"dayPlays": 0,
"hearTime": 0,
"ringtone": null,
"crbt": null,
"audition": null,
"copyFrom": "",
"commentThreadId": "R_SO_4_460043703",
"rtUrl": null,
"ftype": 0,
"rtUrls": [],
"copyright": 2,
"transName": null,
"sign": null,
"hMusic": {
"name": null,
"id": 1286918421,
"size": 10538885,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 320000,
"playTime": 263400,
"volumeDelta": -1.2,
"dfsId_str": null
},
"mMusic": {
"name": null,
"id": 1286918422,
"size": 5269465,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 160000,
"playTime": 263400,
"volumeDelta": -0.76,
"dfsId_str": null
},
"lMusic": {
"name": null,
"id": 1286918423,
"size": 3161697,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 96000,
"playTime": 263400,
"volumeDelta": -0.79,
"dfsId_str": null
},
"bMusic": {
"name": null,
"id": 1286918423,
"size": 3161697,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 96000,
"playTime": 263400,
"volumeDelta": -0.79,
"dfsId_str": null
},
"mp3Url": null,
"rtype": 0,
"rurl": null,
"mvid": 5725016
}, {
"name": "River",
"id": 523250559,
"position": 3,
"alias": [],
"status": 0,
"fee": 4,
"copyrightId": 7003,
"disc": "1",
"no": 5,
"artists": [{
"name": "Eminem",
"id": 32665,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}, {
"name": "Ed Sheeran",
"id": 33184,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"album": {
"name": "Revival",
"id": 36952205,
"type": "专辑",
"size": 19,
"picId": 18448705602723085,
"blurPicUrl": "http://p2.music.126.net/v-c-6B2aS4sZ_G-i97uiUg==/18448705602723085.jpg",
"companyId": 0,
"pic": 18448705602723085,
"picUrl": "http://p2.music.126.net/v-c-6B2aS4sZ_G-i97uiUg==/18448705602723085.jpg",
"publishTime": 1513296000000,
"description": "",
"tags": "",
"company": "环球唱片",
"briefDesc": "",
"artist": {
"name": "",
"id": 0,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
},
"songs": [],
"alias": [],
"status": -4,
"copyrightId": 7003,
"commentThreadId": "R_AL_3_36952205",
"artists": [{
"name": "Eminem",
"id": 32665,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"subType": "录音室版",
"transName": null,
"picId_str": "18448705602723085"
},
"starred": false,
"popularity": 100.0,
"score": 100,
"starredNum": 0,
"duration": 221013,
"playedNum": 0,
"dayPlays": 0,
"hearTime": 0,
"ringtone": null,
"crbt": null,
"audition": null,
"copyFrom": "",
"commentThreadId": "R_SO_4_523250559",
"rtUrl": null,
"ftype": 0,
"rtUrls": [],
"copyright": 2,
"transName": null,
"sign": null,
"hMusic": {
"name": "",
"id": 1398129440,
"size": 8843015,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 320000,
"playTime": 221013,
"volumeDelta": -1.0
},
"mMusic": {
"name": "",
"id": 1398129441,
"size": 5305826,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 192000,
"playTime": 221013,
"volumeDelta": -1.0
},
"lMusic": {
"name": "",
"id": 1398129442,
"size": 3537232,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 128000,
"playTime": 221013,
"volumeDelta": -1.0
},
"bMusic": {
"name": "",
"id": 1398129442,
"size": 3537232,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 128000,
"playTime": 221013,
"volumeDelta": -1.0
},
"mp3Url": null,
"rtype": 0,
"rurl": null,
"mvid": 5841160
}, {
"name": "Best of 2017 Medley",
"id": 526652668,
"position": 1,
"alias": [],
"status": 0,
"fee": 0,
"copyrightId": 0,
"disc": "1",
"no": 1,
"artists": [{
"name": "Anthem Lights",
"id": 86862,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"album": {
"name": "Best of 2017 Medley",
"id": 37099076,
"type": "EP/Single",
"size": 1,
"picId": 109951163095074756,
"blurPicUrl": "http://p2.music.126.net/0dLExQOaZRizEGO4XQ45eA==/109951163095074756.jpg",
"companyId": 0,
"pic": 109951163095074756,
"picUrl": "http://p2.music.126.net/0dLExQOaZRizEGO4XQ45eA==/109951163095074756.jpg",
"publishTime": 1513872000000,
"description": "",
"tags": "",
"company": "Wavy Records",
"briefDesc": "",
"artist": {
"name": "",
"id": 0,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
},
"songs": [],
"alias": [],
"status": 0,
"copyrightId": 0,
"commentThreadId": "R_AL_3_37099076",
"artists": [{
"name": "Anthem Lights",
"id": 86862,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"subType": "录音室版",
"transName": null,
"picId_str": "109951163095074756"
},
"starred": false,
"popularity": 100.0,
"score": 100,
"starredNum": 0,
"duration": 145998,
"playedNum": 0,
"dayPlays": 0,
"hearTime": 0,
"ringtone": null,
"crbt": null,
"audition": null,
"copyFrom": "",
"commentThreadId": "R_SO_4_526652668",
"rtUrl": null,
"ftype": 0,
"rtUrls": [],
"copyright": 0,
"transName": null,
"sign": null,
"hMusic": null,
"mMusic": {
"name": "",
"id": 1402340446,
"size": 3504631,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 192000,
"playTime": 145998,
"volumeDelta": 0.0
},
"lMusic": {
"name": "",
"id": 1402340447,
"size": 2336435,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 128000,
"playTime": 145998,
"volumeDelta": 0.0
},
"bMusic": {
"name": "",
"id": 1402340447,
"size": 2336435,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 128000,
"playTime": 145998,
"volumeDelta": 0.0
},
"mp3Url": null,
"rtype": 0,
"rurl": null,
"mvid": 5796031
}, {
"name": "极美",
"id": 516657215,
"position": 10,
"alias": [],
"status": 0,
"fee": 4,
"copyrightId": 7003,
"disc": "1",
"no": 10,
"artists": [{
"name": "孙燕姿",
"id": 9272,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"album": {
"name": "孙燕姿No. 13作品:跳舞的梵谷",
"id": 36714070,
"type": "专辑",
"size": 10,
"picId": 18357446138140955,
"blurPicUrl": "http://p2.music.126.net/_VjuIgInJqwxdyoy4FF3IA==/18357446138140955.jpg",
"companyId": 0,
"pic": 18357446138140955,
"picUrl": "http://p2.music.126.net/_VjuIgInJqwxdyoy4FF3IA==/18357446138140955.jpg",
"publishTime": 1510185600000,
"description": "",
"tags": "",
"company": "环球唱片",
"briefDesc": "",
"artist": {
"name": "",
"id": 0,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
},
"songs": [],
"alias": [],
"status": -4,
"copyrightId": 7003,
"commentThreadId": "R_AL_3_36714070",
"artists": [{
"name": "孙燕姿",
"id": 9272,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"subType": "录音室版",
"transName": null,
"picId_str": "18357446138140955"
},
"starred": false,
"popularity": 95.0,
"score": 95,
"starredNum": 0,
"duration": 211613,
"playedNum": 0,
"dayPlays": 0,
"hearTime": 0,
"ringtone": null,
"crbt": null,
"audition": null,
"copyFrom": "",
"commentThreadId": "R_SO_4_516657215",
"rtUrl": null,
"ftype": 0,
"rtUrls": [],
"copyright": 2,
"transName": null,
"sign": null,
"hMusic": {
"name": "",
"id": 1402501222,
"size": 8466852,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 320000,
"playTime": 211613,
"volumeDelta": 0.0
},
"mMusic": {
"name": "",
"id": 1402501223,
"size": 5080129,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 192000,
"playTime": 211613,
"volumeDelta": 0.0
},
"lMusic": {
"name": "",
"id": 1402501224,
"size": 3386767,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 128000,
"playTime": 211613,
"volumeDelta": 0.0
},
"bMusic": {
"name": "",
"id": 1402501224,
"size": 3386767,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 128000,
"playTime": 211613,
"volumeDelta": 0.0
},
"mp3Url": null,
"rtype": 0,
"rurl": null,
"mvid": 5810580
}, {
"name": "连名带姓",
"id": 522352195,
"position": 3,
"alias": [],
"status": 0,
"fee": 4,
"copyrightId": 7003,
"disc": "",
"no": 6,
"artists": [{
"name": "张惠妹",
"id": 10559,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"album": {
"name": "偷故事的人 ",
"id": 36941045,
"type": "专辑",
"size": 10,
"picId": 109951163079119875,
"blurPicUrl": "http://p2.music.126.net/SbJdGLDz9V1_sLffpjMU8g==/109951163079119875.jpg",
"companyId": 0,
"pic": 109951163079119875,
"picUrl": "http://p2.music.126.net/SbJdGLDz9V1_sLffpjMU8g==/109951163079119875.jpg",
"publishTime": 1513008000007,
"description": "",
"tags": "",
"company": "环球唱片",
"briefDesc": "",
"artist": {
"name": "",
"id": 0,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
},
"songs": [],
"alias": [],
"status": -4,
"copyrightId": 7003,
"commentThreadId": "R_AL_3_36941045",
"artists": [{
"name": "张惠妹",
"id": 10559,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"subType": "录音室版",
"transName": null,
"picId_str": "109951163079119875"
},
"starred": false,
"popularity": 100.0,
"score": 100,
"starredNum": 0,
"duration": 333549,
"playedNum": 0,
"dayPlays": 0,
"hearTime": 0,
"ringtone": null,
"crbt": null,
"audition": null,
"copyFrom": "",
"commentThreadId": "R_SO_4_522352195",
"rtUrl": null,
"ftype": 0,
"rtUrls": [],
"copyright": 2,
"transName": null,
"sign": null,
"hMusic": {
"name": "",
"id": 1404214114,
"size": 13344436,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 320000,
"playTime": 333549,
"volumeDelta": 0.0
},
"mMusic": {
"name": "",
"id": 1404214115,
"size": 8006679,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 192000,
"playTime": 333549,
"volumeDelta": 0.0
},
"lMusic": {
"name": "",
"id": 1404214116,
"size": 5337800,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 128000,
"playTime": 333549,
"volumeDelta": 0.0
},
"bMusic": {
"name": "",
"id": 1404214116,
"size": 5337800,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 128000,
"playTime": 333549,
"volumeDelta": 0.0
},
"mp3Url": null,
"rtype": 0,
"rurl": null,
"mvid": 5764014
}, {
"name": "Look What You Made Me Do",
"id": 501133611,
"position": 3,
"alias": [],
"status": 0,
"fee": 4,
"copyrightId": 7003,
"disc": "1",
"no": 6,
"artists": [{
"name": "Taylor Swift",
"id": 44266,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"album": {
"name": "reputation",
"id": 36709029,
"type": "专辑",
"size": 15,
"picId": 109951163054654501,
"blurPicUrl": "http://p2.music.126.net/fdh0myRe6FD87QNJtvGe_A==/109951163054654501.jpg",
"companyId": 0,
"pic": 109951163054654501,
"picUrl": "http://p2.music.126.net/fdh0myRe6FD87QNJtvGe_A==/109951163054654501.jpg",
"publishTime": 1510243200007,
"description": "",
"tags": "",
"company": "环球唱片",
"briefDesc": "",
"artist": {
"name": "",
"id": 0,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
},
"songs": [],
"alias": [],
"status": -4,
"copyrightId": 7003,
"commentThreadId": "R_AL_3_36709029",
"artists": [{
"name": "Taylor Swift",
"id": 44266,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"subType": "录音室版",
"transName": null,
"picId_str": "109951163054654501"
},
"starred": false,
"popularity": 100.0,
"score": 100,
"starredNum": 0,
"duration": 211859,
"playedNum": 0,
"dayPlays": 0,
"hearTime": 0,
"ringtone": null,
"crbt": null,
"audition": null,
"copyFrom": "",
"commentThreadId": "R_SO_4_501133611",
"rtUrl": null,
"ftype": 0,
"rtUrls": [],
"copyright": 2,
"transName": null,
"sign": null,
"hMusic": {
"name": "",
"id": 1350204242,
"size": 8477301,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 320000,
"playTime": 211859,
"volumeDelta": -2.0
},
"mMusic": {
"name": "",
"id": 1350204243,
"size": 5086398,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 192000,
"playTime": 211859,
"volumeDelta": -2.0
},
"lMusic": {
"name": "",
"id": 1350204244,
"size": 3390946,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 128000,
"playTime": 211859,
"volumeDelta": -1.0
},
"bMusic": {
"name": "",
"id": 1350204244,
"size": 3390946,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 128000,
"playTime": 211859,
"volumeDelta": -1.0
},
"mp3Url": null,
"rtype": 0,
"rurl": null,
"mvid": 5647227
}, {
"name": "ニア",
"id": 478920777,
"position": 2,
"alias": [],
"status": 0,
"fee": 0,
"copyrightId": 663018,
"disc": "1",
"no": 2,
"artists": [{
"name": "夏代孝明",
"id": 963529,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"album": {
"name": "トランジット",
"id": 35498460,
"type": "EP/Single",
"size": 6,
"picId": 19168885718782298,
"blurPicUrl": "http://p2.music.126.net/YQy-MQSV1uwoj3tOUxPEcw==/19168885718782298.jpg",
"companyId": 0,
"pic": 19168885718782298,
"picUrl": "http://p2.music.126.net/YQy-MQSV1uwoj3tOUxPEcw==/19168885718782298.jpg",
"publishTime": 1494950400000,
"description": "",
"tags": "",
"company": "TOHO animation RECORDS",
"briefDesc": "",
"artist": {
"name": "",
"id": 0,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
},
"songs": [],
"alias": [],
"status": 0,
"copyrightId": 0,
"commentThreadId": "R_AL_3_35498460",
"artists": [{
"name": "夏代孝明",
"id": 963529,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"subType": "录音室版",
"transName": null,
"picId_str": "19168885718782298"
},
"starred": false,
"popularity": 65.0,
"score": 65,
"starredNum": 0,
"duration": 248241,
"playedNum": 0,
"dayPlays": 0,
"hearTime": 0,
"ringtone": null,
"crbt": null,
"audition": null,
"copyFrom": "",
"commentThreadId": "R_SO_4_478920777",
"rtUrl": null,
"ftype": 0,
"rtUrls": [],
"copyright": 0,
"transName": null,
"sign": null,
"hMusic": {
"name": null,
"id": 1313669864,
"size": 9930754,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 320000,
"playTime": 248241,
"volumeDelta": -3.44,
"dfsId_str": null
},
"mMusic": {
"name": null,
"id": 1313669865,
"size": 4965400,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 160000,
"playTime": 248241,
"volumeDelta": -3.03,
"dfsId_str": null
},
"lMusic": {
"name": null,
"id": 1313669866,
"size": 2979258,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 96000,
"playTime": 248241,
"volumeDelta": -3.1,
"dfsId_str": null
},
"bMusic": {
"name": null,
"id": 1313669866,
"size": 2979258,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 96000,
"playTime": 248241,
"volumeDelta": -3.1,
"dfsId_str": null
},
"mp3Url": null,
"rtype": 0,
"rurl": null,
"mvid": 5580116
}, {
"name": "잘하고 있잖아",
"id": 475279662,
"position": 1,
"alias": [],
"status": 0,
"fee": 0,
"copyrightId": 0,
"disc": "1",
"no": 1,
"artists": [{
"name": "Mi-Yu",
"id": 1066121,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}, {
"name": "田锡万",
"id": 12080207,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"album": {
"name": "잘하고 있잖아",
"id": 35449204,
"type": "EP/Single",
"size": 1,
"picId": 18676304511884390,
"blurPicUrl": "http://p2.music.126.net/QMNFpTfXCT6JExllxQDcHA==/18676304511884390.jpg",
"companyId": 0,
"pic": 18676304511884390,
"picUrl": "http://p2.music.126.net/QMNFpTfXCT6JExllxQDcHA==/18676304511884390.jpg",
"publishTime": 1493308800007,
"description": "",
"tags": "",
"company": "포크라노스",
"briefDesc": "",
"artist": {
"name": "",
"id": 0,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
},
"songs": [],
"alias": [],
"status": 0,
"copyrightId": 0,
"commentThreadId": "R_AL_3_35449204",
"artists": [{
"name": "Mi-Yu",
"id": 1066121,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"subType": "录音室版",
"transName": null,
"picId_str": "18676304511884390"
},
"starred": false,
"popularity": 100.0,
"score": 100,
"starredNum": 0,
"duration": 229616,
"playedNum": 0,
"dayPlays": 0,
"hearTime": 0,
"ringtone": null,
"crbt": null,
"audition": null,
"copyFrom": "",
"commentThreadId": "R_SO_4_475279662",
"rtUrl": null,
"ftype": 0,
"rtUrls": [],
"copyright": 0,
"transName": null,
"sign": null,
"hMusic": {
"name": null,
"id": 1308489150,
"size": 9185742,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 320000,
"playTime": 229616,
"volumeDelta": -2.22,
"dfsId_str": null
},
"mMusic": {
"name": null,
"id": 1308489151,
"size": 4592894,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 160000,
"playTime": 229616,
"volumeDelta": -1.78,
"dfsId_str": null
},
"lMusic": {
"name": null,
"id": 1308489152,
"size": 2755754,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 96000,
"playTime": 229616,
"volumeDelta": -1.8,
"dfsId_str": null
},
"bMusic": {
"name": null,
"id": 1308489152,
"size": 2755754,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 96000,
"playTime": 229616,
"volumeDelta": -1.8,
"dfsId_str": null
},
"mp3Url": null,
"rtype": 0,
"rurl": null,
"mvid": 5896040
}, {
"name": "空",
"id": 488256353,
"position": 5,
"alias": [],
"status": 0,
"fee": 8,
"copyrightId": 457010,
"disc": "1",
"no": 5,
"artists": [{
"name": "GENERATIONS from EXILE TRIBE",
"id": 711243,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"album": {
"name": "涙を流せないピエロは太陽も月もない空を見上げた",
"id": 35695351,
"type": "专辑",
"size": 13,
"picId": 18743374720672509,
"blurPicUrl": "http://p2.music.126.net/XJ_gxymu79GfmyNQhC-2MA==/18743374720672509.jpg",
"companyId": 0,
"pic": 18743374720672509,
"picUrl": "http://p2.music.126.net/XJ_gxymu79GfmyNQhC-2MA==/18743374720672509.jpg",
"publishTime": 1499184000007,
"description": "",
"tags": "",
"company": "(P)2017 AVEX ENTERTAINMENT INC.",
"briefDesc": "",
"artist": {
"name": "",
"id": 0,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
},
"songs": [],
"alias": [],
"status": 1,
"copyrightId": 457010,
"commentThreadId": "R_AL_3_35695351",
"artists": [{
"name": "GENERATIONS from EXILE TRIBE",
"id": 711243,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"subType": "录音室版",
"transName": "无法流泪的PIERROT仰望不见日月的天空",
"picId_str": "18743374720672509",
"transNames": ["无法流泪的PIERROT仰望不见日月的天空"]
},
"starred": false,
"popularity": 100.0,
"score": 100,
"starredNum": 0,
"duration": 310133,
"playedNum": 0,
"dayPlays": 0,
"hearTime": 0,
"ringtone": null,
"crbt": null,
"audition": null,
"copyFrom": "",
"commentThreadId": "R_SO_4_488256353",
"rtUrl": null,
"ftype": 0,
"rtUrls": [],
"copyright": 2,
"transName": "电影《哥哥太爱我了怎么办》主题曲",
"sign": null,
"hMusic": {
"name": "",
"id": 1328805427,
"size": 12408207,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 320000,
"playTime": 310133,
"volumeDelta": -3.0
},
"mMusic": {
"name": "",
"id": 1328805428,
"size": 7444942,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 192000,
"playTime": 310133,
"volumeDelta": -3.0
},
"lMusic": {
"name": "",
"id": 1328805429,
"size": 4963309,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 128000,
"playTime": 310133,
"volumeDelta": -2.0
},
"bMusic": {
"name": "",
"id": 1328805429,
"size": 4963309,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 128000,
"playTime": 310133,
"volumeDelta": -2.0
},
"mp3Url": null,
"rtype": 0,
"rurl": null,
"mvid": 5579137,
"transNames": ["电影《哥哥太爱我了怎么办》主题曲"]
}, {
"name": "Forever young",
"id": 475073278,
"position": 4,
"alias": [],
"status": 0,
"fee": 8,
"copyrightId": 502011,
"disc": "1",
"no": 7,
"artists": [{
"name": "朴树",
"id": 4721,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"album": {
"name": "猎户星座",
"id": 35444067,
"type": "专辑",
"size": 11,
"picId": 18885211718782327,
"blurPicUrl": "http://p2.music.126.net/W_5XiCv3rGS1-J7EXpHSCQ==/18885211718782327.jpg",
"companyId": 0,
"pic": 18885211718782327,
"picUrl": "http://p2.music.126.net/W_5XiCv3rGS1-J7EXpHSCQ==/18885211718782327.jpg",
"publishTime": 1493481600007,
"description": "",
"tags": "",
"company": "不晚音乐/地球娱乐",
"briefDesc": "",
"artist": {
"name": "",
"id": 0,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
},
"songs": [],
"alias": [],
"status": 1,
"copyrightId": 502011,
"commentThreadId": "R_AL_3_35444067",
"artists": [{
"name": "朴树",
"id": 4721,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"subType": "录音室版",
"transName": null,
"picId_str": "18885211718782327"
},
"starred": false,
"popularity": 100.0,
"score": 100,
"starredNum": 0,
"duration": 320431,
"playedNum": 0,
"dayPlays": 0,
"hearTime": 0,
"ringtone": null,
"crbt": null,
"audition": null,
"copyFrom": "",
"commentThreadId": "R_SO_4_475073278",
"rtUrl": null,
"ftype": 0,
"rtUrls": [],
"copyright": 1,
"transName": null,
"sign": null,
"hMusic": {
"name": null,
"id": 1307505124,
"size": 12819897,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 320000,
"playTime": 320431,
"volumeDelta": -1.8,
"dfsId_str": null
},
"mMusic": {
"name": null,
"id": 1307505125,
"size": 6409971,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 160000,
"playTime": 320431,
"volumeDelta": -1.38,
"dfsId_str": null
},
"lMusic": {
"name": null,
"id": 1307505126,
"size": 3846000,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 96000,
"playTime": 320431,
"volumeDelta": -1.48,
"dfsId_str": null
},
"bMusic": {
"name": null,
"id": 1307505126,
"size": 3846000,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 96000,
"playTime": 320431,
"volumeDelta": -1.48,
"dfsId_str": null
},
"mp3Url": null,
"rtype": 0,
"rurl": null,
"mvid": 0
}, {
"name": "I'm the One ",
"id": 475207448,
"position": 1,
"alias": [],
"status": 0,
"fee": 8,
"copyrightId": 7001,
"disc": "1",
"no": 1,
"artists": [{
"name": "DJ Khaled",
"id": 31273,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}, {
"name": "Justin Bieber",
"id": 35531,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}, {
"name": "Quavo",
"id": 12086073,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}, {
"name": "Chance The Rapper",
"id": 749038,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}, {
"name": "Lil Wayne",
"id": 38118,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"album": {
"name": "I'm the One",
"id": 35378000,
"type": "EP/Single",
"size": 1,
"picId": 19025949207139508,
"blurPicUrl": "http://p2.music.126.net/erD3reF4IABguYY-1eIabw==/19025949207139508.jpg",
"companyId": 0,
"pic": 19025949207139508,
"picUrl": "http://p2.music.126.net/erD3reF4IABguYY-1eIabw==/19025949207139508.jpg",
"publishTime": 1493308800007,
"description": "",
"tags": "",
"company": "Sony Music Entertainment",
"briefDesc": "",
"artist": {
"name": "",
"id": 0,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
},
"songs": [],
"alias": [],
"status": 3,
"copyrightId": 7001,
"commentThreadId": "R_AL_3_35378000",
"artists": [{
"name": "DJ Khaled",
"id": 31273,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}, {
"name": "Justin Bieber",
"id": 35531,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}, {
"name": "Quavo",
"id": 12086073,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}, {
"name": "Chance The Rapper",
"id": 749038,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p2.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}, {
"name": "Lil Wayne",
"id": 38118,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"subType": "录音室版",
"transName": null,
"picId_str": "19025949207139508"
},
"starred": false,
"popularity": 100.0,
"score": 100,
"starredNum": 0,
"duration": 288876,
"playedNum": 0,
"dayPlays": 0,
"hearTime": 0,
"ringtone": null,
"crbt": null,
"audition": null,
"copyFrom": "",
"commentThreadId": "R_SO_4_475207448",
"rtUrl": null,
"ftype": 0,
"rtUrls": [],
"copyright": 2,
"transName": null,
"sign": null,
"hMusic": {
"name": null,
"id": 1308751674,
"size": 11557660,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 320000,
"playTime": 288876,
"volumeDelta": -2.55,
"dfsId_str": null
},
"mMusic": {
"name": null,
"id": 1308751675,
"size": 5778853,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 160000,
"playTime": 288876,
"volumeDelta": -2.15,
"dfsId_str": null
},
"lMusic": {
"name": null,
"id": 1308751676,
"size": 3467329,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 96000,
"playTime": 288876,
"volumeDelta": -2.16,
"dfsId_str": null
},
"bMusic": {
"name": null,
"id": 1308751676,
"size": 3467329,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 96000,
"playTime": 288876,
"volumeDelta": -2.16,
"dfsId_str": null
},
"mp3Url": null,
"rtype": 0,
"rurl": null,
"mvid": 5509013
}, {
"name": "八十年代的歌",
"id": 447925066,
"position": 7,
"alias": ["Song Of The 80s"],
"status": 0,
"fee": 8,
"copyrightId": 36016,
"disc": "1",
"no": 9,
"artists": [{
"name": "赵雷",
"id": 6731,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"album": {
"name": "无法长大",
"id": 35069014,
"type": "专辑",
"size": 10,
"picId": 18548761162235571,
"blurPicUrl": "http://p1.music.126.net/BJgUd9aD9gpougZFASRTTw==/18548761162235571.jpg",
"companyId": 0,
"pic": 18548761162235571,
"picUrl": "http://p1.music.126.net/BJgUd9aD9gpougZFASRTTw==/18548761162235571.jpg",
"publishTime": 1482249600007,
"description": "",
"tags": "",
"company": "StreetVoice",
"briefDesc": "",
"artist": {
"name": "",
"id": 0,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
},
"songs": [],
"alias": [],
"status": 1,
"copyrightId": 36016,
"commentThreadId": "R_AL_3_35069014",
"artists": [{
"name": "赵雷",
"id": 6731,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"subType": "录音室版",
"transName": null,
"picId_str": "18548761162235571"
},
"starred": false,
"popularity": 100.0,
"score": 100,
"starredNum": 0,
"duration": 271092,
"playedNum": 0,
"dayPlays": 0,
"hearTime": 0,
"ringtone": null,
"crbt": null,
"audition": null,
"copyFrom": "",
"commentThreadId": "R_SO_4_447925066",
"rtUrl": null,
"ftype": 0,
"rtUrls": [],
"copyright": 2,
"transName": null,
"sign": null,
"hMusic": {
"name": null,
"id": 1272627523,
"size": 10846085,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 320000,
"playTime": 271092,
"volumeDelta": -2.65076E-4,
"dfsId_str": null
},
"mMusic": {
"name": null,
"id": 1272627524,
"size": 5423065,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 160000,
"playTime": 271092,
"volumeDelta": -2.65076E-4,
"dfsId_str": null
},
"lMusic": {
"name": null,
"id": 1272627525,
"size": 3253857,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 96000,
"playTime": 271092,
"volumeDelta": -2.65076E-4,
"dfsId_str": null
},
"bMusic": {
"name": null,
"id": 1272627525,
"size": 3253857,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 96000,
"playTime": 271092,
"volumeDelta": -2.65076E-4,
"dfsId_str": null
},
"mp3Url": null,
"rtype": 0,
"rurl": null,
"mvid": 5895025
}, {
"name": "Shape of You",
"id": 451703096,
"position": 1,
"alias": [],
"status": 0,
"fee": 8,
"copyrightId": 7002,
"disc": "1",
"no": 1,
"artists": [{
"name": "Ed Sheeran",
"id": 33184,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"album": {
"name": "Shape Of You",
"id": 35114127,
"type": "EP/Single",
"size": 2,
"picId": 18832435162240436,
"blurPicUrl": "http://p1.music.126.net/5Zs51JS6cQzvQclW5u_J1g==/18832435162240436.jpg",
"companyId": 0,
"pic": 18832435162240436,
"picUrl": "http://p1.music.126.net/5Zs51JS6cQzvQclW5u_J1g==/18832435162240436.jpg",
"publishTime": 1483632000007,
"description": "",
"tags": "",
"company": "华纳唱片",
"briefDesc": "",
"artist": {
"name": "",
"id": 0,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
},
"songs": [],
"alias": [],
"status": 3,
"copyrightId": 7002,
"commentThreadId": "R_AL_3_35114127",
"artists": [{
"name": "Ed Sheeran",
"id": 33184,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"subType": "录音室版",
"transName": null,
"picId_str": "18832435162240436"
},
"starred": false,
"popularity": 100.0,
"score": 100,
"starredNum": 0,
"duration": 231037,
"playedNum": 0,
"dayPlays": 0,
"hearTime": 0,
"ringtone": null,
"crbt": null,
"audition": null,
"copyFrom": "",
"commentThreadId": "R_SO_4_451703096",
"rtUrl": null,
"ftype": 0,
"rtUrls": [],
"copyright": 2,
"transName": null,
"sign": null,
"hMusic": {
"name": null,
"id": 1272480451,
"size": 9244256,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 320000,
"playTime": 231037,
"volumeDelta": -3.94,
"dfsId_str": null
},
"mMusic": {
"name": null,
"id": 1272480452,
"size": 4622151,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 160000,
"playTime": 231037,
"volumeDelta": -3.51,
"dfsId_str": null
},
"lMusic": {
"name": null,
"id": 1272480453,
"size": 2773308,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 96000,
"playTime": 231037,
"volumeDelta": -3.53,
"dfsId_str": null
},
"bMusic": {
"name": null,
"id": 1272480453,
"size": 2773308,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 96000,
"playTime": 231037,
"volumeDelta": -3.53,
"dfsId_str": null
},
"mp3Url": null,
"rtype": 0,
"rurl": null,
"mvid": 5439044
}, {
"name": "我好像在哪见过你",
"id": 417859631,
"position": 3,
"alias": ["电影《精灵王座》主题曲"],
"status": 0,
"fee": 0,
"copyrightId": 14026,
"disc": "1",
"no": 3,
"artists": [{
"name": "薛之谦",
"id": 5781,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"album": {
"name": "初学者",
"id": 34780271,
"type": "专辑",
"size": 10,
"picId": 1369991500930171,
"blurPicUrl": "http://p1.music.126.net/hti_a0LADoFMBHvOBwAtRA==/1369991500930171.jpg",
"companyId": 0,
"pic": 1369991500930171,
"picUrl": "http://p1.music.126.net/hti_a0LADoFMBHvOBwAtRA==/1369991500930171.jpg",
"publishTime": 1468771200007,
"description": "",
"tags": "",
"company": "海蝶",
"briefDesc": "",
"artist": {
"name": "",
"id": 0,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
},
"songs": [],
"alias": [],
"status": 0,
"copyrightId": 14026,
"commentThreadId": "R_AL_3_34780271",
"artists": [{
"name": "薛之谦",
"id": 5781,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"subType": "录音室版",
"transName": null
},
"starred": false,
"popularity": 100.0,
"score": 100,
"starredNum": 0,
"duration": 279145,
"playedNum": 0,
"dayPlays": 0,
"hearTime": 0,
"ringtone": null,
"crbt": null,
"audition": null,
"copyFrom": "",
"commentThreadId": "R_SO_4_417859631",
"rtUrl": null,
"ftype": 0,
"rtUrls": [],
"copyright": 2,
"transName": null,
"sign": null,
"hMusic": {
"name": null,
"id": 1214810504,
"size": 11168958,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 320000,
"playTime": 279145,
"volumeDelta": -2.65076E-4
},
"mMusic": {
"name": null,
"id": 1214810505,
"size": 5584502,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 160000,
"playTime": 279145,
"volumeDelta": -2.65076E-4
},
"lMusic": {
"name": null,
"id": 1214810506,
"size": 3350719,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 96000,
"playTime": 279145,
"volumeDelta": -2.65076E-4
},
"bMusic": {
"name": null,
"id": 1214810506,
"size": 3350719,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 96000,
"playTime": 279145,
"volumeDelta": -2.65076E-4
},
"mp3Url": null,
"rtype": 0,
"rurl": null,
"mvid": 5342354
}, {
"name": "Still Got It ",
"id": 416700171,
"position": 1,
"alias": [],
"status": 0,
"fee": 0,
"copyrightId": 0,
"disc": "1",
"no": 1,
"artists": [{
"name": "Usher",
"id": 45564,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}, {
"name": "Migos",
"id": 776824,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"album": {
"name": "Still Got It",
"id": 34726659,
"type": "EP/Single",
"size": 1,
"picId": 3442570909054815,
"blurPicUrl": "http://p1.music.126.net/JqMXnIiVyVSEtbk9Oi5xPQ==/3442570909054815.jpg",
"companyId": 0,
"pic": 3442570909054815,
"picUrl": "http://p1.music.126.net/JqMXnIiVyVSEtbk9Oi5xPQ==/3442570909054815.jpg",
"publishTime": 1465142400007,
"description": "",
"tags": "",
"company": "Wild Ginger",
"briefDesc": "",
"artist": {
"name": "",
"id": 0,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
},
"songs": [],
"alias": [],
"status": 0,
"copyrightId": 0,
"commentThreadId": "R_AL_3_34726659",
"artists": [{
"name": "Usher",
"id": 45564,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"subType": "录音室版",
"transName": null
},
"starred": false,
"popularity": 50.0,
"score": 50,
"starredNum": 0,
"duration": 213019,
"playedNum": 0,
"dayPlays": 0,
"hearTime": 0,
"ringtone": null,
"crbt": null,
"audition": null,
"copyFrom": "",
"commentThreadId": "R_SO_4_416700171",
"rtUrl": null,
"ftype": 0,
"rtUrls": [],
"copyright": 2,
"transName": null,
"sign": null,
"hMusic": {
"name": null,
"id": 1212146916,
"size": 8521186,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 320000,
"playTime": 213019,
"volumeDelta": -2.65076E-4
},
"mMusic": {
"name": null,
"id": 1212146918,
"size": 4260615,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 160000,
"playTime": 213019,
"volumeDelta": -2.65076E-4
},
"lMusic": {
"name": null,
"id": 1212146919,
"size": 2556386,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 96000,
"playTime": 213019,
"volumeDelta": -2.65076E-4
},
"bMusic": {
"name": null,
"id": 1212146919,
"size": 2556386,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 96000,
"playTime": 213019,
"volumeDelta": -2.65076E-4
},
"mp3Url": null,
"rtype": 0,
"rurl": null,
"mvid": 0
}, {
"name": "All for One",
"id": 412911234,
"position": 1,
"alias": [],
"status": 0,
"fee": 8,
"copyrightId": 7003,
"disc": "1",
"no": 1,
"artists": [{
"name": "The Stone Roses",
"id": 102012,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"album": {
"name": "All for One",
"id": 34702052,
"type": "EP/Single",
"size": 1,
"picId": 1400777823776478,
"blurPicUrl": "http://p1.music.126.net/WCivGASQol8CHD6t8SPtPA==/1400777823776478.jpg",
"companyId": 0,
"pic": 1400777823776478,
"picUrl": "http://p1.music.126.net/WCivGASQol8CHD6t8SPtPA==/1400777823776478.jpg",
"publishTime": 1463068800007,
"description": "",
"tags": "",
"company": "环球唱片",
"briefDesc": "",
"artist": {
"name": "",
"id": 0,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
},
"songs": [],
"alias": [],
"status": 3,
"copyrightId": 564038,
"commentThreadId": "R_AL_3_34702052",
"artists": [{
"name": "The Stone Roses",
"id": 102012,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"subType": "录音室版",
"transName": null
},
"starred": false,
"popularity": 55.0,
"score": 55,
"starredNum": 0,
"duration": 216409,
"playedNum": 0,
"dayPlays": 0,
"hearTime": 0,
"ringtone": null,
"crbt": null,
"audition": null,
"copyFrom": "",
"commentThreadId": "R_SO_4_412911234",
"rtUrl": null,
"ftype": 0,
"rtUrls": [],
"copyright": 0,
"transName": null,
"sign": null,
"hMusic": {
"name": null,
"id": 1206752055,
"size": 8657022,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 320000,
"playTime": 216409,
"volumeDelta": -0.21
},
"mMusic": {
"name": null,
"id": 1206752057,
"size": 4328533,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 160000,
"playTime": 216409,
"volumeDelta": 0.21
},
"lMusic": {
"name": null,
"id": 1206752058,
"size": 2597137,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 96000,
"playTime": 216409,
"volumeDelta": -2.65076E-4
},
"bMusic": {
"name": null,
"id": 1206752058,
"size": 2597137,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 96000,
"playTime": 216409,
"volumeDelta": -2.65076E-4
},
"mp3Url": null,
"rtype": 0,
"rurl": null,
"mvid": 0
}, {
"name": "Shimotsui Bushi",
"id": 28302536,
"position": 2,
"alias": [],
"status": 0,
"fee": 0,
"copyrightId": 663018,
"disc": "1",
"no": 2,
"artists": [{
"name": "OMODAKA",
"id": 20890,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"album": {
"name": "Bridge Song",
"id": 2767874,
"type": "专辑",
"size": 12,
"picId": 6031920790092738,
"blurPicUrl": "http://p1.music.126.net/KuicLkZZeUOh9u-v0wpd_A==/6031920790092738.jpg",
"companyId": 0,
"pic": 6031920790092738,
"picUrl": "http://p1.music.126.net/KuicLkZZeUOh9u-v0wpd_A==/6031920790092738.jpg",
"publishTime": 1394553600007,
"description": "",
"tags": "",
"company": "Far East Recording",
"briefDesc": "",
"artist": {
"name": "",
"id": 0,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
},
"songs": [],
"alias": [],
"status": 0,
"copyrightId": 0,
"commentThreadId": "R_AL_3_2767874",
"artists": [{
"name": "OMODAKA",
"id": 20890,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"subType": "录音室版",
"transName": null
},
"starred": false,
"popularity": 45.0,
"score": 45,
"starredNum": 0,
"duration": 282000,
"playedNum": 0,
"dayPlays": 0,
"hearTime": 0,
"ringtone": "",
"crbt": null,
"audition": null,
"copyFrom": "",
"commentThreadId": "R_SO_4_28302536",
"rtUrl": null,
"ftype": 0,
"rtUrls": [],
"copyright": 2,
"transName": null,
"sign": null,
"hMusic": {
"name": "Shimotsui Bushi",
"id": 46513025,
"size": 11358974,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 320000,
"playTime": 282000,
"volumeDelta": -4.84
},
"mMusic": {
"name": "Shimotsui Bushi",
"id": 46513026,
"size": 5699808,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 160000,
"playTime": 282000,
"volumeDelta": -4.4
},
"lMusic": {
"name": "Shimotsui Bushi",
"id": 46513027,
"size": 3436141,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 96000,
"playTime": 282000,
"volumeDelta": -4.48
},
"bMusic": {
"name": "Shimotsui Bushi",
"id": 46513027,
"size": 3436141,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 96000,
"playTime": 282000,
"volumeDelta": -4.48
},
"mp3Url": null,
"rtype": 0,
"rurl": null,
"mvid": 0
}, {
"name": "A thousand miles(At Yokohama Stadium) - Live",
"id": 31704050,
"position": 1,
"alias": [],
"status": 0,
"fee": 0,
"copyrightId": 0,
"disc": "",
"no": 1,
"artists": [{
"name": "ONE OK ROCK",
"id": 20878,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"album": {
"name": "最新热歌慢摇96",
"id": 3120092,
"type": null,
"size": 104,
"picId": 3384296791681736,
"blurPicUrl": "http://p1.music.126.net/mGoCYYbELhOYlXnXqkhGng==/3384296791681736.jpg",
"companyId": 0,
"pic": 3384296791681736,
"picUrl": "http://p1.music.126.net/mGoCYYbELhOYlXnXqkhGng==/3384296791681736.jpg",
"publishTime": 1388505600004,
"description": "",
"tags": "",
"company": "",
"briefDesc": "",
"artist": {
"name": "",
"id": 0,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
},
"songs": [],
"alias": [],
"status": 2,
"copyrightId": 0,
"commentThreadId": "R_AL_3_3120092",
"artists": [{
"name": "Various Artists",
"id": 104700,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"subType": "录音室版",
"transName": null
},
"starred": false,
"popularity": 95.0,
"score": 95,
"starredNum": 0,
"duration": 291000,
"playedNum": 0,
"dayPlays": 0,
"hearTime": 0,
"ringtone": null,
"crbt": null,
"audition": null,
"copyFrom": "",
"commentThreadId": "R_SO_4_31704050",
"rtUrl": null,
"ftype": 0,
"rtUrls": [],
"copyright": 2,
"transName": null,
"sign": "Live",
"hMusic": {
"name": null,
"id": 61156468,
"size": 6997281,
"extension": "mp3",
"sr": 48000,
"dfsId": 0,
"bitrate": 192000,
"playTime": 291000,
"volumeDelta": 0.0
},
"mMusic": {
"name": null,
"id": 61156469,
"size": 5831797,
"extension": "mp3",
"sr": 48000,
"dfsId": 0,
"bitrate": 160000,
"playTime": 291000,
"volumeDelta": 0.0
},
"lMusic": {
"name": null,
"id": 61156470,
"size": 3499189,
"extension": "mp3",
"sr": 48000,
"dfsId": 0,
"bitrate": 96000,
"playTime": 291000,
"volumeDelta": 0.0
},
"bMusic": {
"name": null,
"id": 61156470,
"size": 3499189,
"extension": "mp3",
"sr": 48000,
"dfsId": 0,
"bitrate": 96000,
"playTime": 291000,
"volumeDelta": 0.0
},
"mp3Url": null,
"rtype": 0,
"rurl": null,
"mvid": 5557195
}, {
"name": "Try",
"id": 28639182,
"position": 4,
"alias": [],
"status": 0,
"fee": 8,
"copyrightId": 7003,
"disc": "1",
"no": 4,
"artists": [{
"name": "Colbie Caillat",
"id": 50890,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"album": {
"name": "Gypsy Heart Side A",
"id": 2860926,
"type": "EP/Single",
"size": 5,
"picId": 5930765720512200,
"blurPicUrl": "http://p1.music.126.net/QBQvSe84znaWIm2PrhQ_ng==/5930765720512200.jpg",
"companyId": 0,
"pic": 5930765720512200,
"picUrl": "http://p1.music.126.net/QBQvSe84znaWIm2PrhQ_ng==/5930765720512200.jpg",
"publishTime": 1402243200007,
"description": "",
"tags": "",
"company": "环球唱片",
"briefDesc": "",
"artist": {
"name": "",
"id": 0,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
},
"songs": [],
"alias": [],
"status": 0,
"copyrightId": 7003,
"commentThreadId": "R_AL_3_2860926",
"artists": [{
"name": "Colbie Caillat",
"id": 50890,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"subType": "录音室版",
"transName": null
},
"starred": false,
"popularity": 100.0,
"score": 100,
"starredNum": 0,
"duration": 224626,
"playedNum": 0,
"dayPlays": 0,
"hearTime": 0,
"ringtone": "",
"crbt": null,
"audition": null,
"copyFrom": "",
"commentThreadId": "R_SO_4_28639182",
"rtUrl": null,
"ftype": 0,
"rtUrls": [],
"copyright": 2,
"transName": null,
"sign": null,
"hMusic": {
"name": "Try",
"id": 47485427,
"size": 9014551,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 320000,
"playTime": 224626,
"volumeDelta": 0.32
},
"mMusic": {
"name": "Try",
"id": 47485428,
"size": 4520968,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 160000,
"playTime": 224626,
"volumeDelta": 0.77
},
"lMusic": {
"name": "Try",
"id": 47485429,
"size": 2723535,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 96000,
"playTime": 224626,
"volumeDelta": 0.631285
},
"bMusic": {
"name": "Try",
"id": 47485429,
"size": 2723535,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 96000,
"playTime": 224626,
"volumeDelta": 0.631285
},
"mp3Url": null,
"rtype": 0,
"rurl": null,
"mvid": 288118
}, {
"name": "Adventure of a Lifetime",
"id": 36308623,
"position": 5,
"alias": [],
"status": 0,
"fee": 8,
"copyrightId": 14002,
"disc": "1",
"no": 1,
"artists": [{
"name": "Coldplay",
"id": 89365,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"album": {
"name": "Adventure of a Lifetime",
"id": 3395162,
"type": "EP/Single",
"size": 2,
"picId": 1365593442720243,
"blurPicUrl": "http://p1.music.126.net/a6OatooKiVqJf4ia4RG2ZQ==/1365593442720243.jpg",
"companyId": 0,
"pic": 1365593442720243,
"picUrl": "http://p1.music.126.net/a6OatooKiVqJf4ia4RG2ZQ==/1365593442720243.jpg",
"publishTime": 1446739200007,
"description": "",
"tags": "",
"company": "华纳PLG",
"briefDesc": "",
"artist": {
"name": "",
"id": 0,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
},
"songs": [],
"alias": [],
"status": 3,
"copyrightId": 14002,
"commentThreadId": "R_AL_3_3395162",
"artists": [{
"name": "Coldplay",
"id": 89365,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"subType": "录音室版",
"transName": null
},
"starred": false,
"popularity": 100.0,
"score": 100,
"starredNum": 0,
"duration": 263848,
"playedNum": 0,
"dayPlays": 0,
"hearTime": 0,
"ringtone": null,
"crbt": null,
"audition": null,
"copyFrom": "",
"commentThreadId": "R_SO_4_36308623",
"rtUrl": null,
"ftype": 0,
"rtUrls": [],
"copyright": 1,
"transName": null,
"sign": null,
"hMusic": {
"name": null,
"id": 110969622,
"size": 10554557,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 320000,
"playTime": 263848,
"volumeDelta": -2.48
},
"mMusic": {
"name": null,
"id": 110969624,
"size": 5277301,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 160000,
"playTime": 263848,
"volumeDelta": -2.07
},
"lMusic": {
"name": null,
"id": 110969625,
"size": 3166398,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 96000,
"playTime": 263848,
"volumeDelta": -2.1
},
"bMusic": {
"name": null,
"id": 110969625,
"size": 3166398,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 96000,
"playTime": 263848,
"volumeDelta": -2.1
},
"mp3Url": null,
"rtype": 0,
"rurl": null,
"mvid": 509032
}, {
"name": "夜の国",
"id": 33761678,
"position": 1,
"alias": ["TV动画《黑街》片尾曲 / TVアニメ「GANGSTA」EDテーマ"],
"status": 0,
"fee": 0,
"copyrightId": 663018,
"disc": "1",
"no": 1,
"artists": [{
"name": "Annabel",
"id": 16083,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"album": {
"name": "夜の国",
"id": 3257226,
"type": "EP/Single",
"size": 4,
"picId": 7982454419372966,
"blurPicUrl": "http://p1.music.126.net/OYUAuPuQk52YLWmUw0K5GQ==/7982454419372966.jpg",
"companyId": 0,
"pic": 7982454419372966,
"picUrl": "http://p1.music.126.net/OYUAuPuQk52YLWmUw0K5GQ==/7982454419372966.jpg",
"publishTime": 1440518400000,
"description": "",
"tags": "",
"company": "Lantis",
"briefDesc": "",
"artist": {
"name": "",
"id": 0,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
},
"songs": [],
"alias": ["TV动画《黑街》ED专辑"],
"status": 0,
"copyrightId": 0,
"commentThreadId": "R_AL_3_3257226",
"artists": [{
"name": "Annabel",
"id": 16083,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"subType": "录音室版",
"transName": null
},
"starred": false,
"popularity": 100.0,
"score": 100,
"starredNum": 0,
"duration": 242106,
"playedNum": 0,
"dayPlays": 0,
"hearTime": 0,
"ringtone": null,
"crbt": null,
"audition": null,
"copyFrom": "",
"commentThreadId": "R_SO_4_33761678",
"rtUrl": null,
"ftype": 0,
"rtUrls": [],
"copyright": 2,
"transName": null,
"sign": null,
"hMusic": {
"name": null,
"id": 102701202,
"size": 9687292,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 320000,
"playTime": 242106,
"volumeDelta": -0.75
},
"mMusic": {
"name": null,
"id": 102701203,
"size": 4843668,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 160000,
"playTime": 242106,
"volumeDelta": -0.32
},
"lMusic": {
"name": null,
"id": 102701204,
"size": 2906218,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 96000,
"playTime": 242106,
"volumeDelta": -0.35
},
"bMusic": {
"name": null,
"id": 102701204,
"size": 2906218,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 96000,
"playTime": 242106,
"volumeDelta": -0.35
},
"mp3Url": null,
"rtype": 0,
"rurl": null,
"mvid": 0
}, {
"name": "ターミナル",
"id": 33761679,
"position": 2,
"alias": [],
"status": 0,
"fee": 0,
"copyrightId": 663018,
"disc": "1",
"no": 2,
"artists": [{
"name": "Annabel",
"id": 16083,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"album": {
"name": "夜の国",
"id": 3257226,
"type": "EP/Single",
"size": 4,
"picId": 7982454419372966,
"blurPicUrl": "http://p1.music.126.net/OYUAuPuQk52YLWmUw0K5GQ==/7982454419372966.jpg",
"companyId": 0,
"pic": 7982454419372966,
"picUrl": "http://p1.music.126.net/OYUAuPuQk52YLWmUw0K5GQ==/7982454419372966.jpg",
"publishTime": 1440518400000,
"description": "",
"tags": "",
"company": "Lantis",
"briefDesc": "",
"artist": {
"name": "",
"id": 0,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
},
"songs": [],
"alias": ["TV动画《黑街》ED专辑"],
"status": 0,
"copyrightId": 0,
"commentThreadId": "R_AL_3_3257226",
"artists": [{
"name": "Annabel",
"id": 16083,
"picId": 0,
"img1v1Id": 0,
"briefDesc": "",
"picUrl": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"img1v1Url": "http://p1.music.126.net/6y-UleORITEDbvrOLV0Q8A==/5639395138885805.jpg",
"albumSize": 0,
"alias": [],
"trans": "",
"musicSize": 0
}],
"subType": "录音室版",
"transName": null
},
"starred": false,
"popularity": 85.0,
"score": 85,
"starredNum": 0,
"duration": 256053,
"playedNum": 0,
"dayPlays": 0,
"hearTime": 0,
"ringtone": null,
"crbt": null,
"audition": null,
"copyFrom": "",
"commentThreadId": "R_SO_4_33761679",
"rtUrl": null,
"ftype": 0,
"rtUrls": [],
"copyright": 2,
"transName": null,
"sign": null,
"hMusic": {
"name": null,
"id": 102701205,
"size": 10245267,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 320000,
"playTime": 256053,
"volumeDelta": -0.9
},
"mMusic": {
"name": null,
"id": 102701206,
"size": 5122656,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 160000,
"playTime": 256053,
"volumeDelta": -0.51
},
"lMusic": {
"name": null,
"id": 102701207,
"size": 3073611,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 96000,
"playTime": 256053,
"volumeDelta": -0.58
},
"bMusic": {
"name": null,
"id": 102701207,
"size": 3073611,
"extension": "mp3",
"sr": 44100,
"dfsId": 0,
"bitrate": 96000,
"playTime": 256053,
"volumeDelta": -0.58
},
"mp3Url": null,
"rtype": 0,
"rurl": null,
"mvid": 0
}],
"cloudTrackCount": 0,
"subscribedCount": 9387,
"totalDuration": 0,
"trackCount": 24,
"tags": [],
"ordered": true,
"userId": 1,
"coverImgUrl": "http://p2.music.126.net/TpRmDebucTTX5UE-7W8png==/19016053603009403.jpg",
"updateTime": 1537006513167,
"commentThreadId": "A_PL_0_1",
"coverImgId": 19016053603009403,
"privacy": 0,
"playCount": 2004106,
"specialType": 5,
"anonimous": false,
"newImported": false,
"trackUpdateTime": 1539056463198,
"adType": 0,
"trackNumberUpdateTime": 1537006502047,
"status": 0,
"description": null,
"createTime": 1356607835546,
"highQuality": false,
"name": "网易云音乐喜欢的音乐",
"id": 1,
"shareCount": 158,
"coverImgId_str": "19016053603009403",
"commentCount": 2870
},
"code": 200
}'''
# def details_playlist(self, id):
# """
# 歌单详情。
# """
# url = 'http://music.163.com/api/playlist/detail?id=%d' % (id)
# html = self.httpRequest(url, method="GET", cookies=self.cookies)
# return html['result']
def search(self, s, offset=0, limit=100, total='true', stype=1):
    """
    Search the music web API.

    :param s: the search keyword string.
    :param offset: pagination offset (default 0).
    :param limit: maximum number of results (default 100).
    :param total: API flag, passed through as the string 'true'/'false'.
    :param stype: result type — song (1), album (10), artist (100),
                  playlist (1000), user (1002).
    :return: the 'result' payload of the response, or the historical
             sentinel string "Not Found!" when the response is malformed.
    """
    url = 'http://music.163.com/api/search/get/web'
    data = {
        's': s,
        'offset': offset,
        'total': total,
        'limit': limit,
        'type': stype
    }
    html = self.httpRequest(url, method='POST', data=data, cookies=self.cookies)
    try:
        return html[1]['result']
    except (KeyError, IndexError, TypeError):
        # Narrowed from a bare `except:` which also swallowed
        # KeyboardInterrupt/SystemExit. Keep returning the historical
        # sentinel so existing callers comparing against it still work.
        return "Not Found!"
def details_search(self, id):
    """
    Fetch the song detail for a search result and return its mp3 URL.

    :param id: numeric song id (converted to string for the request).
    :return: the 'mp3Url' field of the first song in the response.
    """
    song_id = str(id)
    quoted_ids = urllib.parse.quote('[%s]' % (song_id))
    url = "http://music.163.com//api/song/detail/?id=%s&ids=%s" % (song_id, quoted_ids)
    response = self.httpRequest(url, method='GET', cookies=self.cookies)
    return response['songs'][0]['mp3Url']
def newsong(self, areaID=0, offset=0, total='true', limit=100):
    """
    Latest music -- new song express.

    :param areaID: region filter — 0 all, 9 Chinese, 96 Western,
                   16 Korean, 8 Japanese.
    :param offset: pagination offset (default 0).
    :param total: API flag, passed through as the string 'true'/'false'.
    :param limit: maximum number of results (default 100).
    :return: the 'data' payload of the response.
    """
    template = 'http://music.163.com/api/discovery/new/songs?areaId=%d&offset=%d&total=%s&limit=%d'
    url = template % (areaID, offset, total, limit)
    response = self.httpRequest(url, method='GET', cookies=self.cookies)
    return response['data']
def fnewsong(self, year=2015, month=4, area='ALL'):
    """
    Latest music -- new album releases.

    :param year: release year to query.
    :param month: release month to query.
    :param area: region filter — ALL, ZH (Chinese), EA (Western),
                 KR (Korean), JP (Japanese).
    :return: the 'monthData' payload of the response.
    """
    template = ('http://music.163.com/api/discovery/new/albums/area'
                '?year=%d&month=%d&area=%s&type=hot&offset=0&total=true&limit=20&rcmd=true')
    url = template % (year, month, area)
    response = self.httpRequest(url, method="GET", cookies=self.cookies)
    return response['monthData']
if __name__ == '__main__':
    # Quick manual smoke test: print each entry of the new-song feed.
    api = WebApi()
    for song in api.newsong():
        print(song)
| 24.189262
| 141
| 0.534776
| 8,806
| 90,105
| 5.445378
| 0.103679
| 0.036203
| 0.049779
| 0.045003
| 0.807057
| 0.791396
| 0.766329
| 0.753399
| 0.718886
| 0.64917
| 0
| 0.195984
| 0.246734
| 90,105
| 3,724
| 142
| 24.195757
| 0.510498
| 0.006914
| 0
| 0.793395
| 0
| 0.05131
| 0.961767
| 0.003332
| 0
| 0
| 0
| 0
| 0
| 1
| 0.003002
| false
| 0.000546
| 0.001365
| 0.000273
| 0.008461
| 0.000273
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b7c3b3147fb781ff09d22d81228c23fe94669d14
| 51,688
|
py
|
Python
|
sdk/python/pulumi_mongodbatlas/network_peering.py
|
pulumi/pulumi-mongodbatlas
|
0d5c085dcfd871b56fb4cf582620260b70caa07a
|
[
"ECL-2.0",
"Apache-2.0"
] | 9
|
2020-04-28T19:12:30.000Z
|
2022-03-22T23:04:46.000Z
|
sdk/python/pulumi_mongodbatlas/network_peering.py
|
pulumi/pulumi-mongodbatlas
|
0d5c085dcfd871b56fb4cf582620260b70caa07a
|
[
"ECL-2.0",
"Apache-2.0"
] | 59
|
2020-06-12T12:12:52.000Z
|
2022-03-28T18:14:50.000Z
|
sdk/python/pulumi_mongodbatlas/network_peering.py
|
pulumi/pulumi-mongodbatlas
|
0d5c085dcfd871b56fb4cf582620260b70caa07a
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2020-09-25T21:22:08.000Z
|
2021-08-30T20:06:18.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['NetworkPeeringArgs', 'NetworkPeering']
@pulumi.input_type
class NetworkPeeringArgs:
    # Generated input-argument class: @pulumi.input_type introspects the
    # getter/setter layout below, so the property structure mirrors the
    # provider schema exactly and must not be restructured by hand.
    def __init__(__self__, *,
                 container_id: pulumi.Input[str],
                 project_id: pulumi.Input[str],
                 provider_name: pulumi.Input[str],
                 accepter_region_name: Optional[pulumi.Input[str]] = None,
                 atlas_cidr_block: Optional[pulumi.Input[str]] = None,
                 atlas_gcp_project_id: Optional[pulumi.Input[str]] = None,
                 atlas_vpc_name: Optional[pulumi.Input[str]] = None,
                 aws_account_id: Optional[pulumi.Input[str]] = None,
                 azure_directory_id: Optional[pulumi.Input[str]] = None,
                 azure_subscription_id: Optional[pulumi.Input[str]] = None,
                 gcp_project_id: Optional[pulumi.Input[str]] = None,
                 network_name: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 route_table_cidr_block: Optional[pulumi.Input[str]] = None,
                 vnet_name: Optional[pulumi.Input[str]] = None,
                 vpc_id: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a NetworkPeering resource.
        :param pulumi.Input[str] container_id: Unique identifier of the MongoDB Atlas container for the provider (GCP) or provider/region (AWS, AZURE). You can create an MongoDB Atlas container using the network_container resource or it can be obtained from the cluster returned values if a cluster has been created before the first container.
        :param pulumi.Input[str] project_id: The unique ID for the MongoDB Atlas project to create the database user.
        :param pulumi.Input[str] provider_name: Cloud provider to whom the peering connection is being made. (Possible Values `AWS`, `AZURE`, `GCP`).
        :param pulumi.Input[str] accepter_region_name: Specifies the AWS region where the peer VPC resides. For complete lists of supported regions, see [Amazon Web Services](https://docs.atlas.mongodb.com/reference/amazon-aws/).
        :param pulumi.Input[str] atlas_gcp_project_id: The Atlas GCP Project ID for the GCP VPC used by your atlas cluster that it is need to set up the reciprocal connection.
        :param pulumi.Input[str] aws_account_id: AWS Account ID of the owner of the peer VPC.
        :param pulumi.Input[str] azure_directory_id: Unique identifier for an Azure AD directory.
        :param pulumi.Input[str] azure_subscription_id: Unique identifier of the Azure subscription in which the VNet resides.
        :param pulumi.Input[str] gcp_project_id: GCP project ID of the owner of the network peer.
        :param pulumi.Input[str] network_name: Name of the network peer to which Atlas connects.
        :param pulumi.Input[str] resource_group_name: Name of your Azure resource group.
        :param pulumi.Input[str] route_table_cidr_block: AWS VPC CIDR block or subnet.
        :param pulumi.Input[str] vnet_name: Name of your Azure VNet.
        :param pulumi.Input[str] vpc_id: Unique identifier of the AWS peer VPC (Note: this is **not** the same as the Atlas AWS VPC that is returned by the network_container resource).
        """
        # The three required properties are always recorded; every optional
        # property is set only when supplied, so unset inputs stay absent
        # (rather than being serialized as explicit None values).
        pulumi.set(__self__, "container_id", container_id)
        pulumi.set(__self__, "project_id", project_id)
        pulumi.set(__self__, "provider_name", provider_name)
        if accepter_region_name is not None:
            pulumi.set(__self__, "accepter_region_name", accepter_region_name)
        if atlas_cidr_block is not None:
            pulumi.set(__self__, "atlas_cidr_block", atlas_cidr_block)
        if atlas_gcp_project_id is not None:
            pulumi.set(__self__, "atlas_gcp_project_id", atlas_gcp_project_id)
        if atlas_vpc_name is not None:
            pulumi.set(__self__, "atlas_vpc_name", atlas_vpc_name)
        if aws_account_id is not None:
            pulumi.set(__self__, "aws_account_id", aws_account_id)
        if azure_directory_id is not None:
            pulumi.set(__self__, "azure_directory_id", azure_directory_id)
        if azure_subscription_id is not None:
            pulumi.set(__self__, "azure_subscription_id", azure_subscription_id)
        if gcp_project_id is not None:
            pulumi.set(__self__, "gcp_project_id", gcp_project_id)
        if network_name is not None:
            pulumi.set(__self__, "network_name", network_name)
        if resource_group_name is not None:
            pulumi.set(__self__, "resource_group_name", resource_group_name)
        if route_table_cidr_block is not None:
            pulumi.set(__self__, "route_table_cidr_block", route_table_cidr_block)
        if vnet_name is not None:
            pulumi.set(__self__, "vnet_name", vnet_name)
        if vpc_id is not None:
            pulumi.set(__self__, "vpc_id", vpc_id)
    @property
    @pulumi.getter(name="containerId")
    def container_id(self) -> pulumi.Input[str]:
        """
        Unique identifier of the MongoDB Atlas container for the provider (GCP) or provider/region (AWS, AZURE). You can create an MongoDB Atlas container using the network_container resource or it can be obtained from the cluster returned values if a cluster has been created before the first container.
        """
        return pulumi.get(self, "container_id")
    @container_id.setter
    def container_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "container_id", value)
    @property
    @pulumi.getter(name="projectId")
    def project_id(self) -> pulumi.Input[str]:
        """
        The unique ID for the MongoDB Atlas project to create the database user.
        """
        return pulumi.get(self, "project_id")
    @project_id.setter
    def project_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "project_id", value)
    @property
    @pulumi.getter(name="providerName")
    def provider_name(self) -> pulumi.Input[str]:
        """
        Cloud provider to whom the peering connection is being made. (Possible Values `AWS`, `AZURE`, `GCP`).
        """
        return pulumi.get(self, "provider_name")
    @provider_name.setter
    def provider_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "provider_name", value)
    @property
    @pulumi.getter(name="accepterRegionName")
    def accepter_region_name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the AWS region where the peer VPC resides. For complete lists of supported regions, see [Amazon Web Services](https://docs.atlas.mongodb.com/reference/amazon-aws/).
        """
        return pulumi.get(self, "accepter_region_name")
    @accepter_region_name.setter
    def accepter_region_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "accepter_region_name", value)
    @property
    @pulumi.getter(name="atlasCidrBlock")
    def atlas_cidr_block(self) -> Optional[pulumi.Input[str]]:
        # NOTE(review): undocumented in the upstream schema; presumably the
        # CIDR block of the Atlas-side VPC/container — confirm before relying on it.
        return pulumi.get(self, "atlas_cidr_block")
    @atlas_cidr_block.setter
    def atlas_cidr_block(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "atlas_cidr_block", value)
    @property
    @pulumi.getter(name="atlasGcpProjectId")
    def atlas_gcp_project_id(self) -> Optional[pulumi.Input[str]]:
        """
        The Atlas GCP Project ID for the GCP VPC used by your atlas cluster that it is need to set up the reciprocal connection.
        """
        return pulumi.get(self, "atlas_gcp_project_id")
    @atlas_gcp_project_id.setter
    def atlas_gcp_project_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "atlas_gcp_project_id", value)
    @property
    @pulumi.getter(name="atlasVpcName")
    def atlas_vpc_name(self) -> Optional[pulumi.Input[str]]:
        # NOTE(review): undocumented in the upstream schema; presumably the
        # name of the Atlas-side (GCP) VPC — confirm before relying on it.
        return pulumi.get(self, "atlas_vpc_name")
    @atlas_vpc_name.setter
    def atlas_vpc_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "atlas_vpc_name", value)
    @property
    @pulumi.getter(name="awsAccountId")
    def aws_account_id(self) -> Optional[pulumi.Input[str]]:
        """
        AWS Account ID of the owner of the peer VPC.
        """
        return pulumi.get(self, "aws_account_id")
    @aws_account_id.setter
    def aws_account_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "aws_account_id", value)
    @property
    @pulumi.getter(name="azureDirectoryId")
    def azure_directory_id(self) -> Optional[pulumi.Input[str]]:
        """
        Unique identifier for an Azure AD directory.
        """
        return pulumi.get(self, "azure_directory_id")
    @azure_directory_id.setter
    def azure_directory_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "azure_directory_id", value)
    @property
    @pulumi.getter(name="azureSubscriptionId")
    def azure_subscription_id(self) -> Optional[pulumi.Input[str]]:
        """
        Unique identifier of the Azure subscription in which the VNet resides.
        """
        return pulumi.get(self, "azure_subscription_id")
    @azure_subscription_id.setter
    def azure_subscription_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "azure_subscription_id", value)
    @property
    @pulumi.getter(name="gcpProjectId")
    def gcp_project_id(self) -> Optional[pulumi.Input[str]]:
        """
        GCP project ID of the owner of the network peer.
        """
        return pulumi.get(self, "gcp_project_id")
    @gcp_project_id.setter
    def gcp_project_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "gcp_project_id", value)
    @property
    @pulumi.getter(name="networkName")
    def network_name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the network peer to which Atlas connects.
        """
        return pulumi.get(self, "network_name")
    @network_name.setter
    def network_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "network_name", value)
    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of your Azure resource group.
        """
        return pulumi.get(self, "resource_group_name")
    @resource_group_name.setter
    def resource_group_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "resource_group_name", value)
    @property
    @pulumi.getter(name="routeTableCidrBlock")
    def route_table_cidr_block(self) -> Optional[pulumi.Input[str]]:
        """
        AWS VPC CIDR block or subnet.
        """
        return pulumi.get(self, "route_table_cidr_block")
    @route_table_cidr_block.setter
    def route_table_cidr_block(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "route_table_cidr_block", value)
    @property
    @pulumi.getter(name="vnetName")
    def vnet_name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of your Azure VNet.
        """
        return pulumi.get(self, "vnet_name")
    @vnet_name.setter
    def vnet_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "vnet_name", value)
    @property
    @pulumi.getter(name="vpcId")
    def vpc_id(self) -> Optional[pulumi.Input[str]]:
        """
        Unique identifier of the AWS peer VPC (Note: this is **not** the same as the Atlas AWS VPC that is returned by the network_container resource).
        """
        return pulumi.get(self, "vpc_id")
    @vpc_id.setter
    def vpc_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "vpc_id", value)
@pulumi.input_type
class _NetworkPeeringState:
    # Generated state class used for get()/lookup: unlike NetworkPeeringArgs,
    # every field (including output-only ones such as status and error_*) is
    # optional. The property layout is introspected by @pulumi.input_type and
    # must not be restructured by hand.
    def __init__(__self__, *,
                 accepter_region_name: Optional[pulumi.Input[str]] = None,
                 atlas_cidr_block: Optional[pulumi.Input[str]] = None,
                 atlas_gcp_project_id: Optional[pulumi.Input[str]] = None,
                 atlas_id: Optional[pulumi.Input[str]] = None,
                 atlas_vpc_name: Optional[pulumi.Input[str]] = None,
                 aws_account_id: Optional[pulumi.Input[str]] = None,
                 azure_directory_id: Optional[pulumi.Input[str]] = None,
                 azure_subscription_id: Optional[pulumi.Input[str]] = None,
                 connection_id: Optional[pulumi.Input[str]] = None,
                 container_id: Optional[pulumi.Input[str]] = None,
                 error_message: Optional[pulumi.Input[str]] = None,
                 error_state: Optional[pulumi.Input[str]] = None,
                 error_state_name: Optional[pulumi.Input[str]] = None,
                 gcp_project_id: Optional[pulumi.Input[str]] = None,
                 network_name: Optional[pulumi.Input[str]] = None,
                 peer_id: Optional[pulumi.Input[str]] = None,
                 project_id: Optional[pulumi.Input[str]] = None,
                 provider_name: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 route_table_cidr_block: Optional[pulumi.Input[str]] = None,
                 status: Optional[pulumi.Input[str]] = None,
                 status_name: Optional[pulumi.Input[str]] = None,
                 vnet_name: Optional[pulumi.Input[str]] = None,
                 vpc_id: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering NetworkPeering resources.
        :param pulumi.Input[str] accepter_region_name: Specifies the AWS region where the peer VPC resides. For complete lists of supported regions, see [Amazon Web Services](https://docs.atlas.mongodb.com/reference/amazon-aws/).
        :param pulumi.Input[str] atlas_gcp_project_id: The Atlas GCP Project ID for the GCP VPC used by your atlas cluster that it is need to set up the reciprocal connection.
        :param pulumi.Input[str] aws_account_id: AWS Account ID of the owner of the peer VPC.
        :param pulumi.Input[str] azure_directory_id: Unique identifier for an Azure AD directory.
        :param pulumi.Input[str] azure_subscription_id: Unique identifier of the Azure subscription in which the VNet resides.
        :param pulumi.Input[str] connection_id: Unique identifier of the Atlas network peering container.
        :param pulumi.Input[str] container_id: Unique identifier of the MongoDB Atlas container for the provider (GCP) or provider/region (AWS, AZURE). You can create an MongoDB Atlas container using the network_container resource or it can be obtained from the cluster returned values if a cluster has been created before the first container.
        :param pulumi.Input[str] error_message: When `"status" : "FAILED"`, Atlas provides a description of the error.
        :param pulumi.Input[str] error_state: Description of the Atlas error when `status` is `Failed`, Otherwise, Atlas returns `null`.
        :param pulumi.Input[str] error_state_name: Error state, if any. The VPC peering connection error state value can be one of the following: `REJECTED`, `EXPIRED`, `INVALID_ARGUMENT`.
        :param pulumi.Input[str] gcp_project_id: GCP project ID of the owner of the network peer.
        :param pulumi.Input[str] network_name: Name of the network peer to which Atlas connects.
        :param pulumi.Input[str] peer_id: Unique identifier of the Atlas network peer.
        :param pulumi.Input[str] project_id: The unique ID for the MongoDB Atlas project to create the database user.
        :param pulumi.Input[str] provider_name: Cloud provider to whom the peering connection is being made. (Possible Values `AWS`, `AZURE`, `GCP`).
        :param pulumi.Input[str] resource_group_name: Name of your Azure resource group.
        :param pulumi.Input[str] route_table_cidr_block: AWS VPC CIDR block or subnet.
        :param pulumi.Input[str] status: Status of the Atlas network peering connection. Azure/GCP: `ADDING_PEER`, `AVAILABLE`, `FAILED`, `DELETING` GCP Only: `WAITING_FOR_USER`.
        :param pulumi.Input[str] status_name: (AWS Only) The VPC peering connection status value can be one of the following: `INITIATING`, `PENDING_ACCEPTANCE`, `FAILED`, `FINALIZING`, `AVAILABLE`, `TERMINATING`.
        :param pulumi.Input[str] vnet_name: Name of your Azure VNet.
        :param pulumi.Input[str] vpc_id: Unique identifier of the AWS peer VPC (Note: this is **not** the same as the Atlas AWS VPC that is returned by the network_container resource).
        """
        # Each property is recorded only when supplied, so absent state fields
        # stay unset (rather than being serialized as explicit None values).
        if accepter_region_name is not None:
            pulumi.set(__self__, "accepter_region_name", accepter_region_name)
        if atlas_cidr_block is not None:
            pulumi.set(__self__, "atlas_cidr_block", atlas_cidr_block)
        if atlas_gcp_project_id is not None:
            pulumi.set(__self__, "atlas_gcp_project_id", atlas_gcp_project_id)
        if atlas_id is not None:
            pulumi.set(__self__, "atlas_id", atlas_id)
        if atlas_vpc_name is not None:
            pulumi.set(__self__, "atlas_vpc_name", atlas_vpc_name)
        if aws_account_id is not None:
            pulumi.set(__self__, "aws_account_id", aws_account_id)
        if azure_directory_id is not None:
            pulumi.set(__self__, "azure_directory_id", azure_directory_id)
        if azure_subscription_id is not None:
            pulumi.set(__self__, "azure_subscription_id", azure_subscription_id)
        if connection_id is not None:
            pulumi.set(__self__, "connection_id", connection_id)
        if container_id is not None:
            pulumi.set(__self__, "container_id", container_id)
        if error_message is not None:
            pulumi.set(__self__, "error_message", error_message)
        if error_state is not None:
            pulumi.set(__self__, "error_state", error_state)
        if error_state_name is not None:
            pulumi.set(__self__, "error_state_name", error_state_name)
        if gcp_project_id is not None:
            pulumi.set(__self__, "gcp_project_id", gcp_project_id)
        if network_name is not None:
            pulumi.set(__self__, "network_name", network_name)
        if peer_id is not None:
            pulumi.set(__self__, "peer_id", peer_id)
        if project_id is not None:
            pulumi.set(__self__, "project_id", project_id)
        if provider_name is not None:
            pulumi.set(__self__, "provider_name", provider_name)
        if resource_group_name is not None:
            pulumi.set(__self__, "resource_group_name", resource_group_name)
        if route_table_cidr_block is not None:
            pulumi.set(__self__, "route_table_cidr_block", route_table_cidr_block)
        if status is not None:
            pulumi.set(__self__, "status", status)
        if status_name is not None:
            pulumi.set(__self__, "status_name", status_name)
        if vnet_name is not None:
            pulumi.set(__self__, "vnet_name", vnet_name)
        if vpc_id is not None:
            pulumi.set(__self__, "vpc_id", vpc_id)
    @property
    @pulumi.getter(name="accepterRegionName")
    def accepter_region_name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the AWS region where the peer VPC resides. For complete lists of supported regions, see [Amazon Web Services](https://docs.atlas.mongodb.com/reference/amazon-aws/).
        """
        return pulumi.get(self, "accepter_region_name")
    @accepter_region_name.setter
    def accepter_region_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "accepter_region_name", value)
    @property
    @pulumi.getter(name="atlasCidrBlock")
    def atlas_cidr_block(self) -> Optional[pulumi.Input[str]]:
        # NOTE(review): undocumented in the upstream schema; presumably the
        # CIDR block of the Atlas-side VPC/container — confirm before relying on it.
        return pulumi.get(self, "atlas_cidr_block")
    @atlas_cidr_block.setter
    def atlas_cidr_block(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "atlas_cidr_block", value)
    @property
    @pulumi.getter(name="atlasGcpProjectId")
    def atlas_gcp_project_id(self) -> Optional[pulumi.Input[str]]:
        """
        The Atlas GCP Project ID for the GCP VPC used by your atlas cluster that it is need to set up the reciprocal connection.
        """
        return pulumi.get(self, "atlas_gcp_project_id")
    @atlas_gcp_project_id.setter
    def atlas_gcp_project_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "atlas_gcp_project_id", value)
    @property
    @pulumi.getter(name="atlasId")
    def atlas_id(self) -> Optional[pulumi.Input[str]]:
        # NOTE(review): undocumented in the upstream schema; presumably the
        # Atlas-assigned identifier of the peering — confirm before relying on it.
        return pulumi.get(self, "atlas_id")
    @atlas_id.setter
    def atlas_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "atlas_id", value)
    @property
    @pulumi.getter(name="atlasVpcName")
    def atlas_vpc_name(self) -> Optional[pulumi.Input[str]]:
        # NOTE(review): undocumented in the upstream schema; presumably the
        # name of the Atlas-side (GCP) VPC — confirm before relying on it.
        return pulumi.get(self, "atlas_vpc_name")
    @atlas_vpc_name.setter
    def atlas_vpc_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "atlas_vpc_name", value)
    @property
    @pulumi.getter(name="awsAccountId")
    def aws_account_id(self) -> Optional[pulumi.Input[str]]:
        """
        AWS Account ID of the owner of the peer VPC.
        """
        return pulumi.get(self, "aws_account_id")
    @aws_account_id.setter
    def aws_account_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "aws_account_id", value)
    @property
    @pulumi.getter(name="azureDirectoryId")
    def azure_directory_id(self) -> Optional[pulumi.Input[str]]:
        """
        Unique identifier for an Azure AD directory.
        """
        return pulumi.get(self, "azure_directory_id")
    @azure_directory_id.setter
    def azure_directory_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "azure_directory_id", value)
    @property
    @pulumi.getter(name="azureSubscriptionId")
    def azure_subscription_id(self) -> Optional[pulumi.Input[str]]:
        """
        Unique identifier of the Azure subscription in which the VNet resides.
        """
        return pulumi.get(self, "azure_subscription_id")
    @azure_subscription_id.setter
    def azure_subscription_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "azure_subscription_id", value)
    @property
    @pulumi.getter(name="connectionId")
    def connection_id(self) -> Optional[pulumi.Input[str]]:
        """
        Unique identifier of the Atlas network peering container.
        """
        return pulumi.get(self, "connection_id")
    @connection_id.setter
    def connection_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "connection_id", value)
    @property
    @pulumi.getter(name="containerId")
    def container_id(self) -> Optional[pulumi.Input[str]]:
        """
        Unique identifier of the MongoDB Atlas container for the provider (GCP) or provider/region (AWS, AZURE). You can create an MongoDB Atlas container using the network_container resource or it can be obtained from the cluster returned values if a cluster has been created before the first container.
        """
        return pulumi.get(self, "container_id")
    @container_id.setter
    def container_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "container_id", value)
    @property
    @pulumi.getter(name="errorMessage")
    def error_message(self) -> Optional[pulumi.Input[str]]:
        """
        When `"status" : "FAILED"`, Atlas provides a description of the error.
        """
        return pulumi.get(self, "error_message")
    @error_message.setter
    def error_message(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "error_message", value)
    @property
    @pulumi.getter(name="errorState")
    def error_state(self) -> Optional[pulumi.Input[str]]:
        """
        Description of the Atlas error when `status` is `Failed`, Otherwise, Atlas returns `null`.
        """
        return pulumi.get(self, "error_state")
    @error_state.setter
    def error_state(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "error_state", value)
    @property
    @pulumi.getter(name="errorStateName")
    def error_state_name(self) -> Optional[pulumi.Input[str]]:
        """
        Error state, if any. The VPC peering connection error state value can be one of the following: `REJECTED`, `EXPIRED`, `INVALID_ARGUMENT`.
        """
        return pulumi.get(self, "error_state_name")
    @error_state_name.setter
    def error_state_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "error_state_name", value)
    @property
    @pulumi.getter(name="gcpProjectId")
    def gcp_project_id(self) -> Optional[pulumi.Input[str]]:
        """
        GCP project ID of the owner of the network peer.
        """
        return pulumi.get(self, "gcp_project_id")
    @gcp_project_id.setter
    def gcp_project_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "gcp_project_id", value)
    @property
    @pulumi.getter(name="networkName")
    def network_name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the network peer to which Atlas connects.
        """
        return pulumi.get(self, "network_name")
    @network_name.setter
    def network_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "network_name", value)
    @property
    @pulumi.getter(name="peerId")
    def peer_id(self) -> Optional[pulumi.Input[str]]:
        """
        Unique identifier of the Atlas network peer.
        """
        return pulumi.get(self, "peer_id")
    @peer_id.setter
    def peer_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "peer_id", value)
    @property
    @pulumi.getter(name="projectId")
    def project_id(self) -> Optional[pulumi.Input[str]]:
        """
        The unique ID for the MongoDB Atlas project to create the database user.
        """
        return pulumi.get(self, "project_id")
    @project_id.setter
    def project_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "project_id", value)
    @property
    @pulumi.getter(name="providerName")
    def provider_name(self) -> Optional[pulumi.Input[str]]:
        """
        Cloud provider to whom the peering connection is being made. (Possible Values `AWS`, `AZURE`, `GCP`).
        """
        return pulumi.get(self, "provider_name")
    @provider_name.setter
    def provider_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "provider_name", value)
    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of your Azure resource group.
        """
        return pulumi.get(self, "resource_group_name")
    @resource_group_name.setter
    def resource_group_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "resource_group_name", value)
    @property
    @pulumi.getter(name="routeTableCidrBlock")
    def route_table_cidr_block(self) -> Optional[pulumi.Input[str]]:
        """
        AWS VPC CIDR block or subnet.
        """
        return pulumi.get(self, "route_table_cidr_block")
    @route_table_cidr_block.setter
    def route_table_cidr_block(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "route_table_cidr_block", value)
    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input[str]]:
        """
        Status of the Atlas network peering connection. Azure/GCP: `ADDING_PEER`, `AVAILABLE`, `FAILED`, `DELETING` GCP Only: `WAITING_FOR_USER`.
        """
        return pulumi.get(self, "status")
    @status.setter
    def status(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "status", value)
    @property
    @pulumi.getter(name="statusName")
    def status_name(self) -> Optional[pulumi.Input[str]]:
        """
        (AWS Only) The VPC peering connection status value can be one of the following: `INITIATING`, `PENDING_ACCEPTANCE`, `FAILED`, `FINALIZING`, `AVAILABLE`, `TERMINATING`.
        """
        return pulumi.get(self, "status_name")
    @status_name.setter
    def status_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "status_name", value)
    @property
    @pulumi.getter(name="vnetName")
    def vnet_name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of your Azure VNet.
        """
        return pulumi.get(self, "vnet_name")
    @vnet_name.setter
    def vnet_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "vnet_name", value)
    @property
    @pulumi.getter(name="vpcId")
    def vpc_id(self) -> Optional[pulumi.Input[str]]:
        """
        Unique identifier of the AWS peer VPC (Note: this is **not** the same as the Atlas AWS VPC that is returned by the network_container resource).
        """
        return pulumi.get(self, "vpc_id")
    @vpc_id.setter
    def vpc_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "vpc_id", value)
class NetworkPeering(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
accepter_region_name: Optional[pulumi.Input[str]] = None,
atlas_cidr_block: Optional[pulumi.Input[str]] = None,
atlas_gcp_project_id: Optional[pulumi.Input[str]] = None,
atlas_vpc_name: Optional[pulumi.Input[str]] = None,
aws_account_id: Optional[pulumi.Input[str]] = None,
azure_directory_id: Optional[pulumi.Input[str]] = None,
azure_subscription_id: Optional[pulumi.Input[str]] = None,
container_id: Optional[pulumi.Input[str]] = None,
gcp_project_id: Optional[pulumi.Input[str]] = None,
network_name: Optional[pulumi.Input[str]] = None,
project_id: Optional[pulumi.Input[str]] = None,
provider_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
route_table_cidr_block: Optional[pulumi.Input[str]] = None,
vnet_name: Optional[pulumi.Input[str]] = None,
vpc_id: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
## Import
Clusters can be imported using project ID and network peering id, in the format `PROJECTID-PEERID-PROVIDERNAME`, e.g.
```sh
$ pulumi import mongodbatlas:index/networkPeering:NetworkPeering my_peering 1112222b3bf99403840e8934-5cbf563d87d9d67253be590a-AWS
```
See detailed information for arguments and attributes[MongoDB API Network Peering Connection](https://docs.atlas.mongodb.com/reference/api/vpc-create-peering-connection/)
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] accepter_region_name: Specifies the AWS region where the peer VPC resides. For complete lists of supported regions, see [Amazon Web Services](https://docs.atlas.mongodb.com/reference/amazon-aws/).
:param pulumi.Input[str] atlas_gcp_project_id: The Atlas GCP Project ID for the GCP VPC used by your atlas cluster that it is need to set up the reciprocal connection.
:param pulumi.Input[str] aws_account_id: AWS Account ID of the owner of the peer VPC.
:param pulumi.Input[str] azure_directory_id: Unique identifier for an Azure AD directory.
:param pulumi.Input[str] azure_subscription_id: Unique identifier of the Azure subscription in which the VNet resides.
:param pulumi.Input[str] container_id: Unique identifier of the MongoDB Atlas container for the provider (GCP) or provider/region (AWS, AZURE). You can create an MongoDB Atlas container using the network_container resource or it can be obtained from the cluster returned values if a cluster has been created before the first container.
:param pulumi.Input[str] gcp_project_id: GCP project ID of the owner of the network peer.
:param pulumi.Input[str] network_name: Name of the network peer to which Atlas connects.
:param pulumi.Input[str] project_id: The unique ID for the MongoDB Atlas project to create the database user.
:param pulumi.Input[str] provider_name: Cloud provider to whom the peering connection is being made. (Possible Values `AWS`, `AZURE`, `GCP`).
:param pulumi.Input[str] resource_group_name: Name of your Azure resource group.
:param pulumi.Input[str] route_table_cidr_block: AWS VPC CIDR block or subnet.
:param pulumi.Input[str] vnet_name: Name of your Azure VNet.
:param pulumi.Input[str] vpc_id: Unique identifier of the AWS peer VPC (Note: this is **not** the same as the Atlas AWS VPC that is returned by the network_container resource).
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: NetworkPeeringArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
## Import
Clusters can be imported using project ID and network peering id, in the format `PROJECTID-PEERID-PROVIDERNAME`, e.g.
```sh
$ pulumi import mongodbatlas:index/networkPeering:NetworkPeering my_peering 1112222b3bf99403840e8934-5cbf563d87d9d67253be590a-AWS
```
See detailed information for arguments and attributes[MongoDB API Network Peering Connection](https://docs.atlas.mongodb.com/reference/api/vpc-create-peering-connection/)
:param str resource_name: The name of the resource.
:param NetworkPeeringArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(NetworkPeeringArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
accepter_region_name: Optional[pulumi.Input[str]] = None,
atlas_cidr_block: Optional[pulumi.Input[str]] = None,
atlas_gcp_project_id: Optional[pulumi.Input[str]] = None,
atlas_vpc_name: Optional[pulumi.Input[str]] = None,
aws_account_id: Optional[pulumi.Input[str]] = None,
azure_directory_id: Optional[pulumi.Input[str]] = None,
azure_subscription_id: Optional[pulumi.Input[str]] = None,
container_id: Optional[pulumi.Input[str]] = None,
gcp_project_id: Optional[pulumi.Input[str]] = None,
network_name: Optional[pulumi.Input[str]] = None,
project_id: Optional[pulumi.Input[str]] = None,
provider_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
route_table_cidr_block: Optional[pulumi.Input[str]] = None,
vnet_name: Optional[pulumi.Input[str]] = None,
vpc_id: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = NetworkPeeringArgs.__new__(NetworkPeeringArgs)
__props__.__dict__["accepter_region_name"] = accepter_region_name
__props__.__dict__["atlas_cidr_block"] = atlas_cidr_block
__props__.__dict__["atlas_gcp_project_id"] = atlas_gcp_project_id
__props__.__dict__["atlas_vpc_name"] = atlas_vpc_name
__props__.__dict__["aws_account_id"] = aws_account_id
__props__.__dict__["azure_directory_id"] = azure_directory_id
__props__.__dict__["azure_subscription_id"] = azure_subscription_id
if container_id is None and not opts.urn:
raise TypeError("Missing required property 'container_id'")
__props__.__dict__["container_id"] = container_id
__props__.__dict__["gcp_project_id"] = gcp_project_id
__props__.__dict__["network_name"] = network_name
if project_id is None and not opts.urn:
raise TypeError("Missing required property 'project_id'")
__props__.__dict__["project_id"] = project_id
if provider_name is None and not opts.urn:
raise TypeError("Missing required property 'provider_name'")
__props__.__dict__["provider_name"] = provider_name
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["route_table_cidr_block"] = route_table_cidr_block
__props__.__dict__["vnet_name"] = vnet_name
__props__.__dict__["vpc_id"] = vpc_id
__props__.__dict__["atlas_id"] = None
__props__.__dict__["connection_id"] = None
__props__.__dict__["error_message"] = None
__props__.__dict__["error_state"] = None
__props__.__dict__["error_state_name"] = None
__props__.__dict__["peer_id"] = None
__props__.__dict__["status"] = None
__props__.__dict__["status_name"] = None
super(NetworkPeering, __self__).__init__(
'mongodbatlas:index/networkPeering:NetworkPeering',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
        id: pulumi.Input[str],
        opts: Optional[pulumi.ResourceOptions] = None,
        accepter_region_name: Optional[pulumi.Input[str]] = None,
        atlas_cidr_block: Optional[pulumi.Input[str]] = None,
        atlas_gcp_project_id: Optional[pulumi.Input[str]] = None,
        atlas_id: Optional[pulumi.Input[str]] = None,
        atlas_vpc_name: Optional[pulumi.Input[str]] = None,
        aws_account_id: Optional[pulumi.Input[str]] = None,
        azure_directory_id: Optional[pulumi.Input[str]] = None,
        azure_subscription_id: Optional[pulumi.Input[str]] = None,
        connection_id: Optional[pulumi.Input[str]] = None,
        container_id: Optional[pulumi.Input[str]] = None,
        error_message: Optional[pulumi.Input[str]] = None,
        error_state: Optional[pulumi.Input[str]] = None,
        error_state_name: Optional[pulumi.Input[str]] = None,
        gcp_project_id: Optional[pulumi.Input[str]] = None,
        network_name: Optional[pulumi.Input[str]] = None,
        peer_id: Optional[pulumi.Input[str]] = None,
        project_id: Optional[pulumi.Input[str]] = None,
        provider_name: Optional[pulumi.Input[str]] = None,
        resource_group_name: Optional[pulumi.Input[str]] = None,
        route_table_cidr_block: Optional[pulumi.Input[str]] = None,
        status: Optional[pulumi.Input[str]] = None,
        status_name: Optional[pulumi.Input[str]] = None,
        vnet_name: Optional[pulumi.Input[str]] = None,
        vpc_id: Optional[pulumi.Input[str]] = None) -> 'NetworkPeering':
    """
    Look up an existing NetworkPeering resource by name and provider id,
    optionally seeding its state with the given property values.

    :param str resource_name: The unique name of the resulting resource.
    :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
    :param pulumi.ResourceOptions opts: Options for the resource.

    The remaining keyword arguments pre-populate the corresponding state
    fields of the peering connection — cloud-provider identifiers
    (``aws_account_id``, ``azure_subscription_id``, ``gcp_project_id`` …),
    CIDR blocks, the Atlas ``container_id``/``project_id``/``provider_name``
    triple, and the read-only status/error fields. Any argument left as
    ``None`` is stored unset and refreshed from the provider.
    """
    opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
    __props__ = _NetworkPeeringState.__new__(_NetworkPeeringState)
    # Fill the state bag in one pass instead of a long run of individual
    # per-field assignments; the keys mirror the parameter names exactly.
    state = {
        "accepter_region_name": accepter_region_name,
        "atlas_cidr_block": atlas_cidr_block,
        "atlas_gcp_project_id": atlas_gcp_project_id,
        "atlas_id": atlas_id,
        "atlas_vpc_name": atlas_vpc_name,
        "aws_account_id": aws_account_id,
        "azure_directory_id": azure_directory_id,
        "azure_subscription_id": azure_subscription_id,
        "connection_id": connection_id,
        "container_id": container_id,
        "error_message": error_message,
        "error_state": error_state,
        "error_state_name": error_state_name,
        "gcp_project_id": gcp_project_id,
        "network_name": network_name,
        "peer_id": peer_id,
        "project_id": project_id,
        "provider_name": provider_name,
        "resource_group_name": resource_group_name,
        "route_table_cidr_block": route_table_cidr_block,
        "status": status,
        "status_name": status_name,
        "vnet_name": vnet_name,
        "vpc_id": vpc_id,
    }
    __props__.__dict__.update(state)
    return NetworkPeering(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="accepterRegionName")
def accepter_region_name(self) -> pulumi.Output[str]:
    """AWS region where the peer VPC resides; see the supported-region lists at https://docs.atlas.mongodb.com/reference/amazon-aws/."""
    return pulumi.get(self, "accepter_region_name")
@property
@pulumi.getter(name="atlasCidrBlock")
def atlas_cidr_block(self) -> pulumi.Output[str]:
    # NOTE(review): undocumented upstream — presumably the CIDR block used on
    # the Atlas side of the peering; confirm against the MongoDB Atlas docs.
    return pulumi.get(self, "atlas_cidr_block")
@property
@pulumi.getter(name="atlasGcpProjectId")
def atlas_gcp_project_id(self) -> pulumi.Output[str]:
    """The Atlas GCP Project ID for the GCP VPC used by your Atlas cluster, needed to set up the reciprocal connection."""
    return pulumi.get(self, "atlas_gcp_project_id")
@property
@pulumi.getter(name="atlasId")
def atlas_id(self) -> pulumi.Output[str]:
    # NOTE(review): undocumented upstream — appears to be an Atlas-assigned
    # identifier for this peering; confirm against the MongoDB Atlas docs.
    return pulumi.get(self, "atlas_id")
@property
@pulumi.getter(name="atlasVpcName")
def atlas_vpc_name(self) -> pulumi.Output[str]:
    # NOTE(review): undocumented upstream — presumably the name of the
    # Atlas-side VPC; confirm against the MongoDB Atlas docs.
    return pulumi.get(self, "atlas_vpc_name")
@property
@pulumi.getter(name="awsAccountId")
def aws_account_id(self) -> pulumi.Output[str]:
    """AWS Account ID of the owner of the peer VPC."""
    return pulumi.get(self, "aws_account_id")
@property
@pulumi.getter(name="azureDirectoryId")
def azure_directory_id(self) -> pulumi.Output[str]:
    """Unique identifier for an Azure AD directory."""
    return pulumi.get(self, "azure_directory_id")
@property
@pulumi.getter(name="azureSubscriptionId")
def azure_subscription_id(self) -> pulumi.Output[str]:
    """Unique identifier of the Azure subscription in which the VNet resides."""
    return pulumi.get(self, "azure_subscription_id")
@property
@pulumi.getter(name="connectionId")
def connection_id(self) -> pulumi.Output[str]:
    """Unique identifier of the Atlas network peering container."""
    return pulumi.get(self, "connection_id")
@property
@pulumi.getter(name="containerId")
def container_id(self) -> pulumi.Output[str]:
    """Unique identifier of the MongoDB Atlas container for the provider (GCP) or provider/region (AWS, AZURE); create one via the network_container resource, or take it from cluster outputs if a cluster predates the first container."""
    return pulumi.get(self, "container_id")
@property
@pulumi.getter(name="errorMessage")
def error_message(self) -> pulumi.Output[str]:
    """When `"status" : "FAILED"`, Atlas provides a description of the error."""
    return pulumi.get(self, "error_message")
@property
@pulumi.getter(name="errorState")
def error_state(self) -> pulumi.Output[str]:
    """Description of the Atlas error when `status` is `Failed`; otherwise Atlas returns `null`."""
    return pulumi.get(self, "error_state")
@property
@pulumi.getter(name="errorStateName")
def error_state_name(self) -> pulumi.Output[str]:
    """Error state, if any; one of `REJECTED`, `EXPIRED`, `INVALID_ARGUMENT` for the VPC peering connection."""
    return pulumi.get(self, "error_state_name")
@property
@pulumi.getter(name="gcpProjectId")
def gcp_project_id(self) -> pulumi.Output[str]:
    """GCP project ID of the owner of the network peer."""
    return pulumi.get(self, "gcp_project_id")
@property
@pulumi.getter(name="networkName")
def network_name(self) -> pulumi.Output[str]:
    """Name of the network peer to which Atlas connects."""
    return pulumi.get(self, "network_name")
@property
@pulumi.getter(name="peerId")
def peer_id(self) -> pulumi.Output[str]:
    """Unique identifier of the Atlas network peer."""
    return pulumi.get(self, "peer_id")
@property
@pulumi.getter(name="projectId")
def project_id(self) -> pulumi.Output[str]:
    """The unique ID for the MongoDB Atlas project to create the database user."""
    return pulumi.get(self, "project_id")
@property
@pulumi.getter(name="providerName")
def provider_name(self) -> pulumi.Output[str]:
    """Cloud provider to whom the peering connection is being made (possible values: `AWS`, `AZURE`, `GCP`)."""
    return pulumi.get(self, "provider_name")
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Output[str]:
    """Name of your Azure resource group."""
    return pulumi.get(self, "resource_group_name")
@property
@pulumi.getter(name="routeTableCidrBlock")
def route_table_cidr_block(self) -> pulumi.Output[str]:
    """AWS VPC CIDR block or subnet."""
    return pulumi.get(self, "route_table_cidr_block")
@property
@pulumi.getter
def status(self) -> pulumi.Output[str]:
    """Status of the Atlas network peering connection. Azure/GCP: `ADDING_PEER`, `AVAILABLE`, `FAILED`, `DELETING`; GCP only: `WAITING_FOR_USER`."""
    return pulumi.get(self, "status")
@property
@pulumi.getter(name="statusName")
def status_name(self) -> pulumi.Output[str]:
    """(AWS only) VPC peering connection status: `INITIATING`, `PENDING_ACCEPTANCE`, `FAILED`, `FINALIZING`, `AVAILABLE`, or `TERMINATING`."""
    return pulumi.get(self, "status_name")
@property
@pulumi.getter(name="vnetName")
def vnet_name(self) -> pulumi.Output[str]:
    """Name of your Azure VNet."""
    return pulumi.get(self, "vnet_name")
@property
@pulumi.getter(name="vpcId")
def vpc_id(self) -> pulumi.Output[str]:
    """Unique identifier of the AWS peer VPC (note: **not** the same as the Atlas AWS VPC returned by the network_container resource)."""
    return pulumi.get(self, "vpc_id")
| 48.670433
| 343
| 0.667312
| 6,601
| 51,688
| 4.965611
| 0.038782
| 0.083898
| 0.105925
| 0.112087
| 0.933645
| 0.915401
| 0.910489
| 0.89618
| 0.88596
| 0.858198
| 0
| 0.001782
| 0.22926
| 51,688
| 1,061
| 344
| 48.716305
| 0.821
| 0.317965
| 0
| 0.769697
| 1
| 0
| 0.115229
| 0.013129
| 0
| 0
| 0
| 0
| 0
| 1
| 0.168182
| false
| 0.001515
| 0.007576
| 0.012121
| 0.278788
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b7c49fa44df9cceeab88ef2118bcbbcb677be6a2
| 4,958
|
py
|
Python
|
tests/meme_test/filters_test.py
|
tomking/UMass-STEM-Discord-Bot
|
5d8230dd167d18138f77794f97790f492ac81e8c
|
[
"MIT"
] | 5
|
2019-03-27T02:29:18.000Z
|
2020-11-07T23:35:43.000Z
|
tests/meme_test/filters_test.py
|
tomking/UMass-STEM-Discord-Bot
|
5d8230dd167d18138f77794f97790f492ac81e8c
|
[
"MIT"
] | 45
|
2019-04-10T04:27:31.000Z
|
2022-03-11T23:41:59.000Z
|
tests/meme_test/filters_test.py
|
tomking/UMass-STEM-Discord-Bot
|
5d8230dd167d18138f77794f97790f492ac81e8c
|
[
"MIT"
] | 8
|
2019-03-27T03:46:54.000Z
|
2021-08-16T17:03:28.000Z
|
import os
import os.path
import glob
import overlay
import filters
from pathlib import Path
from matplotlib.testing.compare import compare_images
# Name stem shared by every image produced/compared in this module.
base='filters'
# Directory where freshly generated test images are written.
tdir = os.path.join('tests','test_images',base)
# Directory holding the pre-approved reference images.
refd = os.path.join('tests','reference_images',base)
# Maximum RMS difference accepted by matplotlib's compare_images.
TOLERANCE = 11.0
def test_intensify():
    """intensify_image(factor=5) output must match the reference within TOLERANCE."""
    fname = base + '_intensify.png'
    out_path = os.path.join(tdir, fname)
    ref_path = os.path.join(refd, fname)
    source = overlay.url_to_image('https://cdn.discordapp.com/attachments/501594682820788224/701320190738038837/zoomarun_final.png')
    filtered = filters.intensify_image(source, 5)
    filtered.save(out_path)
    # Log both files' presence and sizes for debugging CI failures.
    for path in (out_path, ref_path):
        print(glob.glob(path), '[', os.path.getsize(path), 'bytes', ']')
    diff = compare_images(ref_path, out_path, tol=TOLERANCE)
    if diff is not None:
        print('result=', diff)
    assert diff is None
def test_highlight():
    """highlight_image output must match the reference within TOLERANCE."""
    fname = base + '_highlight.png'
    out_path = os.path.join(tdir, fname)
    ref_path = os.path.join(refd, fname)
    source = overlay.url_to_image('https://cdn.discordapp.com/attachments/501594682820788224/701320190738038837/zoomarun_final.png')
    filtered = filters.highlight_image(source)
    filtered.save(out_path)
    for path in (out_path, ref_path):
        print(glob.glob(path), '[', os.path.getsize(path), 'bytes', ']')
    diff = compare_images(ref_path, out_path, tol=TOLERANCE)
    if diff is not None:
        print('result=', diff)
    assert diff is None
def test_customHighlight():
    """custom_edge_highlight_image with RGB (30, 0, 75) must match the reference."""
    fname = base + '_customHighlight.png'
    out_path = os.path.join(tdir, fname)
    ref_path = os.path.join(refd, fname)
    source = overlay.url_to_image('https://cdn.discordapp.com/attachments/501594682820788224/701320190738038837/zoomarun_final.png')
    filtered = filters.custom_edge_highlight_image(source, 30, 0, 75)
    filtered.save(out_path)
    for path in (out_path, ref_path):
        print(glob.glob(path), '[', os.path.getsize(path), 'bytes', ']')
    diff = compare_images(ref_path, out_path, tol=TOLERANCE)
    if diff is not None:
        print('result=', diff)
    assert diff is None
def test_mirrory():
    """mirror_y output must match the reference within TOLERANCE."""
    fname = base + '_mirrorY.png'
    out_path = os.path.join(tdir, fname)
    ref_path = os.path.join(refd, fname)
    source = overlay.url_to_image('https://cdn.discordapp.com/attachments/501594682820788224/701320190738038837/zoomarun_final.png')
    filtered = filters.mirror_y(source)
    filtered.save(out_path)
    for path in (out_path, ref_path):
        print(glob.glob(path), '[', os.path.getsize(path), 'bytes', ']')
    diff = compare_images(ref_path, out_path, tol=TOLERANCE)
    if diff is not None:
        print('result=', diff)
    assert diff is None
def test_mirrorx():
    """mirror_x output must match the reference within TOLERANCE."""
    fname = base + '_mirrorX.png'
    out_path = os.path.join(tdir, fname)
    ref_path = os.path.join(refd, fname)
    source = overlay.url_to_image('https://cdn.discordapp.com/attachments/501594682820788224/701320190738038837/zoomarun_final.png')
    filtered = filters.mirror_x(source)
    filtered.save(out_path)
    for path in (out_path, ref_path):
        print(glob.glob(path), '[', os.path.getsize(path), 'bytes', ']')
    diff = compare_images(ref_path, out_path, tol=TOLERANCE)
    if diff is not None:
        print('result=', diff)
    assert diff is None
def test_pixelate():
    """pixelate_image(factor=5) output must match the reference within TOLERANCE."""
    fname = base + '_pixelate.png'
    out_path = os.path.join(tdir, fname)
    ref_path = os.path.join(refd, fname)
    source = overlay.url_to_image('https://cdn.discordapp.com/attachments/501594682820788224/701320190738038837/zoomarun_final.png')
    filtered = filters.pixelate_image(source, 5)
    filtered.save(out_path)
    for path in (out_path, ref_path):
        print(glob.glob(path), '[', os.path.getsize(path), 'bytes', ']')
    diff = compare_images(ref_path, out_path, tol=TOLERANCE)
    if diff is not None:
        print('result=', diff)
    assert diff is None
def test_saturate():
    """saturate_image(factor=5) output must match the reference within TOLERANCE."""
    fname = base + '_saturate.png'
    out_path = os.path.join(tdir, fname)
    ref_path = os.path.join(refd, fname)
    source = overlay.url_to_image('https://cdn.discordapp.com/attachments/501594682820788224/701320190738038837/zoomarun_final.png')
    filtered = filters.saturate_image(source, 5)
    filtered.save(out_path)
    for path in (out_path, ref_path):
        print(glob.glob(path), '[', os.path.getsize(path), 'bytes', ']')
    diff = compare_images(ref_path, out_path, tol=TOLERANCE)
    if diff is not None:
        print('result=', diff)
    assert diff is None
| 28.65896
| 133
| 0.678701
| 644
| 4,958
| 5.141304
| 0.111801
| 0.056176
| 0.048324
| 0.029598
| 0.841136
| 0.841136
| 0.841136
| 0.841136
| 0.841136
| 0.841136
| 0
| 0.064037
| 0.171642
| 4,958
| 173
| 134
| 28.65896
| 0.742148
| 0
| 0
| 0.713115
| 0
| 0
| 0.192378
| 0
| 0
| 0
| 0
| 0
| 0.057377
| 1
| 0.057377
| false
| 0
| 0.057377
| 0
| 0.114754
| 0.172131
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b7d37778ecc500021d583c18f58afde0ead6510a
| 12,173
|
py
|
Python
|
tests/types/test_arguments.py
|
joelryan2k/strawberry
|
10af5aaf7265b1a3231654650149a5c25b8a836d
|
[
"MIT"
] | null | null | null |
tests/types/test_arguments.py
|
joelryan2k/strawberry
|
10af5aaf7265b1a3231654650149a5c25b8a836d
|
[
"MIT"
] | null | null | null |
tests/types/test_arguments.py
|
joelryan2k/strawberry
|
10af5aaf7265b1a3231654650149a5c25b8a836d
|
[
"MIT"
] | null | null | null |
import sys
from typing import List, Optional
import pytest
from typing_extensions import Annotated
import strawberry
from strawberry.arguments import UNSET
from strawberry.exceptions import MultipleStrawberryArgumentsError
def test_basic_arguments():
    """A plain `str` argument is required; an `Optional[str]` one is optional and camelCased."""
    @strawberry.type
    class Query:
        @strawberry.field
        def name(self, argument: str, optional_argument: Optional[str]) -> str:
            return "Name"

    definition = Query._type_definition
    assert definition.name == "Query"

    arguments = definition.fields[0].arguments
    assert len(arguments) == 2
    assert arguments[0].graphql_name == "argument"
    assert arguments[0].type == str
    assert arguments[0].is_optional is False
    assert arguments[1].graphql_name == "optionalArgument"
    assert arguments[1].type == str
    assert arguments[1].is_optional
def test_input_type_as_argument():
    """Input types work as resolver arguments, required or optional."""
    @strawberry.input
    class Input:
        name: str

    @strawberry.type
    class Query:
        @strawberry.field
        def name(self, input: Input, optional_input: Optional[Input]) -> str:
            return input.name

    definition = Query._type_definition
    assert definition.name == "Query"

    arguments = definition.fields[0].arguments
    assert len(arguments) == 2
    assert arguments[0].graphql_name == "input"
    assert arguments[0].type == Input
    assert arguments[0].is_optional is False
    assert arguments[1].graphql_name == "optionalInput"
    assert arguments[1].type == Input
    assert arguments[1].is_optional
def test_arguments_lists():
    """A `List[Input]` argument is a required list whose child is a required Input."""
    @strawberry.input
    class Input:
        name: str

    @strawberry.type
    class Query:
        @strawberry.field
        def names(self, inputs: List[Input]) -> List[str]:
            return [input.name for input in inputs]

    definition = Query._type_definition
    assert definition.name == "Query"
    assert len(definition.fields[0].arguments) == 1

    argument = definition.fields[0].arguments[0]
    assert argument.graphql_name == "inputs"
    assert argument.type is None
    assert argument.is_list
    assert argument.is_optional is False
    assert argument.child.graphql_name is None
    assert argument.child.type == Input
    assert argument.child.is_optional is False
def test_arguments_lists_of_optionals():
    """A `List[Optional[Input]]` argument is a required list of optional children."""
    @strawberry.input
    class Input:
        name: str

    @strawberry.type
    class Query:
        @strawberry.field
        def names(self, inputs: List[Optional[Input]]) -> List[str]:
            return [input.name for input in inputs if input]

    definition = Query._type_definition
    assert definition.name == "Query"
    assert len(definition.fields[0].arguments) == 1

    argument = definition.fields[0].arguments[0]
    assert argument.graphql_name == "inputs"
    assert argument.type is None
    assert argument.is_list
    assert argument.is_optional is False
    # Consistency fix: mirror test_arguments_lists by also asserting the
    # synthesized child carries no GraphQL name of its own.
    assert argument.child.graphql_name is None
    assert argument.child.type == Input
    assert argument.child.is_optional is True
def test_basic_arguments_on_resolver():
    """Arguments of an external resolver function are picked up by strawberry.field."""
    def name_resolver(
        id: strawberry.ID, argument: str, optional_argument: Optional[str]
    ) -> str:
        return "Name"

    @strawberry.type
    class Query:
        name: str = strawberry.field(resolver=name_resolver)

    definition = Query._type_definition
    assert definition.name == "Query"

    arguments = definition.fields[0].arguments
    assert len(arguments) == 3
    assert arguments[0].graphql_name == "id"
    assert arguments[0].type == strawberry.ID
    assert arguments[0].is_optional is False
    assert arguments[1].graphql_name == "argument"
    assert arguments[1].type == str
    assert arguments[1].is_optional is False
    assert arguments[2].graphql_name == "optionalArgument"
    assert arguments[2].type == str
    assert arguments[2].is_optional
def test_arguments_when_extending_a_type():
    """A subclass inherits resolver fields, arguments intact."""
    def name_resolver(
        id: strawberry.ID, argument: str, optional_argument: Optional[str]
    ) -> str:
        return "Name"

    @strawberry.type
    class NameQuery:
        name: str = strawberry.field(resolver=name_resolver)

    @strawberry.type
    class Query(NameQuery):
        pass

    definition = Query._type_definition
    assert definition.name == "Query"
    assert len(definition.fields) == 1

    arguments = definition.fields[0].arguments
    assert len(arguments) == 3
    assert arguments[0].graphql_name == "id"
    assert arguments[0].type == strawberry.ID
    assert arguments[0].is_optional is False
    assert arguments[1].graphql_name == "argument"
    assert arguments[1].type == str
    assert arguments[1].is_optional is False
    assert arguments[2].graphql_name == "optionalArgument"
    assert arguments[2].type == str
    assert arguments[2].is_optional
def test_arguments_when_extending_multiple_types():
    """Multiple inheritance merges fields from both parents, arguments intact."""
    def name_resolver(id: strawberry.ID) -> str:
        return "Name"

    def name_2_resolver(id: strawberry.ID) -> str:
        return "Name 2"

    @strawberry.type
    class NameQuery:
        name: str = strawberry.field(permission_classes=[], resolver=name_resolver)

    @strawberry.type
    class ExampleQuery:
        name_2: str = strawberry.field(permission_classes=[], resolver=name_2_resolver)

    @strawberry.type
    class RootQuery(NameQuery, ExampleQuery):
        pass

    definition = RootQuery._type_definition
    assert definition.name == "RootQuery"
    assert len(definition.fields) == 2

    first_field, second_field = definition.fields[0], definition.fields[1]
    assert len(first_field.arguments) == 1
    assert first_field.arguments[0].graphql_name == "id"
    assert first_field.arguments[0].type == strawberry.ID
    assert first_field.arguments[0].is_optional is False

    assert len(second_field.arguments) == 1
    assert second_field.graphql_name == "name2"
    assert second_field.arguments[0].graphql_name == "id"
    assert second_field.arguments[0].type == strawberry.ID
    assert second_field.arguments[0].is_optional is False
def test_argument_with_default_value_none():
    """An explicit `= None` default makes the argument optional with default None."""
    @strawberry.type
    class Query:
        @strawberry.field
        def name(self, argument: Optional[str] = None) -> str:
            return "Name"

    definition = Query._type_definition
    assert definition.name == "Query"
    assert len(definition.fields[0].arguments) == 1
    argument = definition.fields[0].arguments[0]
    assert argument.graphql_name == "argument"
    assert argument.type == str
    assert argument.is_optional is True
    assert argument.description is None
    # The explicit None default is preserved as-is, not replaced by a sentinel.
    assert argument.default is None
def test_argument_with_default_value_undefined():
    """An Optional argument with no default gets the UNSET sentinel, not None."""
    @strawberry.type
    class Query:
        @strawberry.field
        def name(self, argument: Optional[str]) -> str:
            return "Name"

    definition = Query._type_definition
    assert definition.name == "Query"
    assert len(definition.fields[0].arguments) == 1
    argument = definition.fields[0].arguments[0]
    assert argument.graphql_name == "argument"
    assert argument.type == str
    assert argument.is_optional is True
    assert argument.description is None
    # No default supplied -> the UNSET sentinel distinguishes "omitted" from None.
    assert argument.default is UNSET
def test_annotated_argument_on_resolver():
    """strawberry.argument metadata inside Annotated supplies the argument description."""
    @strawberry.type
    class Query:
        @strawberry.field
        def name(  # type: ignore
            argument: Annotated[
                str,
                strawberry.argument(description="This is a description"),  # noqa: F722
            ]
        ) -> str:
            return "Name"

    definition = Query._type_definition
    assert definition.name == "Query"
    assert len(definition.fields[0].arguments) == 1
    argument = definition.fields[0].arguments[0]
    assert argument.graphql_name == "argument"
    assert argument.type == str
    assert argument.is_optional is False
    assert argument.description == "This is a description"
def test_annotated_optional_arguments_on_resolver():
    """Annotated metadata combines with Optional: description kept, argument optional."""
    @strawberry.type
    class Query:
        @strawberry.field
        def name(  # type: ignore
            argument: Annotated[
                Optional[str],
                strawberry.argument(description="This is a description"),  # noqa: F722
            ]
        ) -> str:
            return "Name"

    definition = Query._type_definition
    assert definition.name == "Query"
    assert len(definition.fields[0].arguments) == 1
    argument = definition.fields[0].arguments[0]
    assert argument.graphql_name == "argument"
    assert argument.type == str
    assert argument.is_optional is True
    assert argument.description == "This is a description"
def test_annotated_argument_with_default_value():
    """An Annotated argument keeps both its description and its default value."""
    @strawberry.type
    class Query:
        @strawberry.field
        def name(  # type: ignore
            argument: Annotated[
                str,
                strawberry.argument(description="This is a description"),  # noqa: F722
            ] = "Patrick"
        ) -> str:
            return "Name"

    definition = Query._type_definition
    assert definition.name == "Query"
    assert len(definition.fields[0].arguments) == 1
    argument = definition.fields[0].arguments[0]
    assert argument.graphql_name == "argument"
    assert argument.type == str
    # A non-None default does not make the argument optional.
    assert argument.is_optional is False
    assert argument.description == "This is a description"
    assert argument.default == "Patrick"
def test_multiple_annotated_arguments_exception():
    """Two strawberry.argument entries in one Annotated raise MultipleStrawberryArgumentsError."""
    with pytest.raises(MultipleStrawberryArgumentsError) as error:

        @strawberry.field
        def name(  # type: ignore
            argument: Annotated[
                str,
                strawberry.argument(description="This is a description"),  # noqa: F722
                strawberry.argument(description="Another description"),  # noqa: F722
            ]
        ) -> str:
            return "Name"

    # The error message names both the argument and the field.
    assert str(error.value) == (
        "Annotation for argument `argument` "
        "on field `name` cannot have multiple "
        "`strawberry.argument`s"
    )
def test_annotated_with_other_information():
    """Non-strawberry metadata inside Annotated is ignored (no description set)."""
    @strawberry.type
    class Query:
        @strawberry.field
        def name(  # type: ignore
            argument: Annotated[str, "Some other info"]  # noqa: F722
        ) -> str:
            return "Name"

    definition = Query._type_definition
    assert definition.name == "Query"
    assert len(definition.fields[0].arguments) == 1
    argument = definition.fields[0].arguments[0]
    assert argument.graphql_name == "argument"
    assert argument.type == str
    assert argument.is_optional is False
    assert argument.description is None
@pytest.mark.skipif(
    sys.version_info < (3, 9),
    reason="Annotated type was added in python 3.9",
)
def test_annotated_python_39():
    """Same Annotated behaviour using typing.Annotated (3.9+) instead of typing_extensions."""
    from typing import Annotated

    @strawberry.type
    class Query:
        @strawberry.field
        def name(  # type: ignore
            argument: Annotated[
                str,
                strawberry.argument(description="This is a description"),  # noqa: F722
            ]
        ) -> str:
            return "Name"

    definition = Query._type_definition
    assert definition.name == "Query"
    assert len(definition.fields[0].arguments) == 1
    argument = definition.fields[0].arguments[0]
    assert argument.graphql_name == "argument"
    assert argument.type == str
    assert argument.is_optional is False
    assert argument.description == "This is a description"
| 29.762836
| 87
| 0.676661
| 1,437
| 12,173
| 5.623521
| 0.068198
| 0.146517
| 0.140948
| 0.215567
| 0.862022
| 0.848781
| 0.824155
| 0.786041
| 0.755723
| 0.753372
| 0
| 0.018573
| 0.21712
| 12,173
| 408
| 88
| 29.835784
| 0.829381
| 0.012651
| 0
| 0.725086
| 0
| 0
| 0.055643
| 0.001833
| 0
| 0
| 0
| 0
| 0.388316
| 1
| 0.106529
| false
| 0.006873
| 0.027491
| 0.054983
| 0.281787
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4d2b388ba8f768ed9d9319d18fd12a4bdb5f7b94
| 5,609
|
py
|
Python
|
src/python/conway_tests.py
|
eweill/ConwayGameOfLife
|
c3cda523364dac03a0f111bc1fac6cfe0b604a2f
|
[
"MIT"
] | null | null | null |
src/python/conway_tests.py
|
eweill/ConwayGameOfLife
|
c3cda523364dac03a0f111bc1fac6cfe0b604a2f
|
[
"MIT"
] | null | null | null |
src/python/conway_tests.py
|
eweill/ConwayGameOfLife
|
c3cda523364dac03a0f111bc1fac6cfe0b604a2f
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt
from random import randint
from copy import deepcopy
import numpy as np
import unittest, sys
import math
from GoLquadtree import GoLNode, GoLQuadTree
from conway import ConwayGOLGrid, ConwayGOLCell
import os, psutil, gc
from datetime import datetime
def memory_usage_psutil():
    """
    Return the resident set size of the current Python process, in MiB.
    """
    rss_bytes = psutil.Process(os.getpid()).memory_info()[0]
    return rss_bytes / float(2 ** 20)
if __name__ == '__main__':
# This is going to run multiple sizes of the game with each different
# version and time and compare memory usage of all versions.
grid_sizes = [16, 32, 64, 128]#, 256]
versions = ["Naive", "Optimized", "QuadTree"]
average_naive = []
average_opt = []
average_quad = []
naive_time = []
opt_time = []
quad_time = []
for grid in grid_sizes:
start_cells = []
for x in range(grid):
for y in range(grid):
if randint(0, 100) < 30:
start_cells.append((x,y))
for version in range(3):
print "Starting", versions[version], " at grid size", grid, "random"
start = datetime.now()
game = ConwayGOLGrid(grid, grid, start_cells, version, "B3/S23")
memory_used = memory_usage_psutil()
count = 0
while(count < 100 and game.update()):
count += 1
memory_used += memory_usage_psutil()
final = datetime.now()-start
memory_used /= (count + 1)
game = []
gc.collect()
if version == 0:
average_naive.append(memory_used)
naive_time.append(int(final.total_seconds()))
elif version == 1:
average_opt.append(memory_used)
opt_time.append(int(final.total_seconds()))
else:
average_quad.append(memory_used)
quad_time.append(int(final.total_seconds()))
fix, ax = plt.subplots()
index = np.arange(len(grid_sizes))
bar_width = 0.35
error_config = {'ecolor':'0.3'}
rects_naive = plt.bar(index, average_naive, bar_width, color='b', error_kw=error_config, label='Naive')
rects_opt = plt.bar(index+bar_width, average_opt, bar_width, color='r', error_kw=error_config, label='Optimized')
rects_quad = plt.bar(index+(2*bar_width), average_quad, bar_width, color='g', error_kw=error_config, label='QuadTree')
plt.xlabel('Grid Size')
plt.ylabel('Average Memory Usage')
plt.title('Memory Used by Different Optimization Strategies')
plt.xticks(index + (2 * bar_width / 3), grid_sizes)
plt.legend(loc=2)
plt.tight_layout()
#plt.show()
plt.savefig('average_random.png')
fix, ax = plt.subplots()
index = np.arange(len(grid_sizes))
bar_width = 0.35
error_config = {'ecolor':'0.3'}
rects_naive = plt.bar(index, naive_time, bar_width, color='b', error_kw=error_config, label='Naive')
rects_opt = plt.bar(index+bar_width, opt_time, bar_width, color='r', error_kw=error_config, label='Optimized')
rects_quad = plt.bar(index+(2*bar_width), quad_time, bar_width, color='g', error_kw=error_config, label='QuadTree')
plt.xlabel('Grid Size')
plt.ylabel('Execution Time for 100 Generations')
plt.title('Time Taken by Different Optimization Strategies')
plt.xticks(index + (2 * bar_width / 3), grid_sizes)
plt.legend(loc=2)
plt.tight_layout()
#plt.show()
plt.savefig('time_random.png')
average_naive = []
average_opt = []
average_quad = []
naive_time = []
opt_time = []
quad_time = []
for grid in grid_sizes:
start_cells = [(grid/2, grid/2)]
for version in range(3):
print "Starting", versions[version], " at grid size", grid, "random"
start = datetime.now()
game = ConwayGOLGrid(grid, grid, start_cells, version, "B1/S12")
memory_used = memory_usage_psutil()
count = 0
while(count < 100 and game.update()):
count += 1
memory_used += memory_usage_psutil()
final = datetime.now() - start
memory_used /= (count + 1)
game = []
gc.collect()
if version == 0:
average_naive.append(memory_used)
naive_time.append(int(final.total_seconds()))
elif version == 1:
average_opt.append(memory_used)
opt_time.append(int(final.total_seconds()))
else:
average_quad.append(memory_used)
quad_time.append(int(final.total_seconds()))
fix, ax = plt.subplots()
index = np.arange(len(grid_sizes))
bar_width = 0.35
error_config = {'ecolor':'0.3'}
rects_naive = plt.bar(index, average_naive, bar_width, color='b', error_kw=error_config, label='Naive')
rects_opt = plt.bar(index+bar_width, average_opt, bar_width, color='r', error_kw=error_config, label='Optimized')
rects_quad = plt.bar(index+(2*bar_width), average_quad, bar_width, color='g', error_kw=error_config, label='QuadTree')
plt.xlabel('Grid Size')
plt.ylabel('Average Memory Usage')
plt.title('Memory Used by Different Optimization Strategies')
plt.xticks(index + (2 * bar_width / 3), grid_sizes)
plt.legend(loc=2)
plt.tight_layout()
#plt.show()
plt.savefig('mem_triangle.png')
fix, ax = plt.subplots()
index = np.arange(len(grid_sizes))
bar_width = 0.35
error_config = {'ecolor':'0.3'}
rects_naive = plt.bar(index, naive_time, bar_width, color='b', error_kw=error_config, label='Naive')
rects_opt = plt.bar(index+bar_width, opt_time, bar_width, color='r', error_kw=error_config, label='Optimized')
rects_quad = plt.bar(index+(2*bar_width), quad_time, bar_width, color='g', error_kw=error_config, label='QuadTree')
plt.xlabel('Grid Size')
plt.ylabel('Execution Time for 100 Generations')
plt.title('Time Taken by Different Optimization Strategies')
plt.xticks(index + (2 * bar_width / 3), grid_sizes)
plt.legend(loc=2)
plt.tight_layout()
#plt.show()
plt.savefig('time_triangle.png')
| 24.281385
| 119
| 0.694776
| 829
| 5,609
| 4.501809
| 0.171291
| 0.060021
| 0.03537
| 0.057878
| 0.825831
| 0.825831
| 0.825831
| 0.825831
| 0.825831
| 0.825831
| 0
| 0.019182
| 0.163487
| 5,609
| 230
| 120
| 24.386957
| 0.776215
| 0.0312
| 0
| 0.788321
| 0
| 0
| 0.118153
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.072993
| null | null | 0.014599
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4d2b817df1ab461f57db4c44477f7a7ebb3348b8
| 49
|
py
|
Python
|
src/tesseract_baker/ui/__init__.py
|
starofrainnight/tesseract_baker
|
1f4d2f63784fe044ce09b7f6ba83583984c59ba1
|
[
"MIT"
] | null | null | null |
src/tesseract_baker/ui/__init__.py
|
starofrainnight/tesseract_baker
|
1f4d2f63784fe044ce09b7f6ba83583984c59ba1
|
[
"MIT"
] | null | null | null |
src/tesseract_baker/ui/__init__.py
|
starofrainnight/tesseract_baker
|
1f4d2f63784fe044ce09b7f6ba83583984c59ba1
|
[
"MIT"
] | null | null | null |
import rabird.pyside
rabird.pyside.import_uis()
| 12.25
| 26
| 0.816327
| 7
| 49
| 5.571429
| 0.571429
| 0.615385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081633
| 49
| 3
| 27
| 16.333333
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4d6fbeabf6207b7ab2d8c6a99b3b8906ccdb2482
| 36,162
|
py
|
Python
|
clients/python/lakefs_client/api/branches_api.py
|
NNstorm/lakeFS
|
8d8c179cb442290a7ca5020dcf7e95e41301bcf8
|
[
"Apache-2.0"
] | 1
|
2021-09-09T16:21:14.000Z
|
2021-09-09T16:21:14.000Z
|
clients/python/lakefs_client/api/branches_api.py
|
NNstorm/lakeFS
|
8d8c179cb442290a7ca5020dcf7e95e41301bcf8
|
[
"Apache-2.0"
] | null | null | null |
clients/python/lakefs_client/api/branches_api.py
|
NNstorm/lakeFS
|
8d8c179cb442290a7ca5020dcf7e95e41301bcf8
|
[
"Apache-2.0"
] | null | null | null |
"""
lakeFS API
lakeFS HTTP API # noqa: E501
The version of the OpenAPI document: 0.1.0
Contact: services@treeverse.io
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from lakefs_client.api_client import ApiClient, Endpoint as _Endpoint
from lakefs_client.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from lakefs_client.model.branch_creation import BranchCreation
from lakefs_client.model.diff_list import DiffList
from lakefs_client.model.error import Error
from lakefs_client.model.ref import Ref
from lakefs_client.model.ref_list import RefList
from lakefs_client.model.reset_creation import ResetCreation
from lakefs_client.model.revert_creation import RevertCreation
class BranchesApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def __create_branch(
self,
repository,
branch_creation,
**kwargs
):
"""create branch # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_branch(repository, branch_creation, async_req=True)
>>> result = thread.get()
Args:
repository (str):
branch_creation (BranchCreation):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
str
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['repository'] = \
repository
kwargs['branch_creation'] = \
branch_creation
return self.call_with_http_info(**kwargs)
self.create_branch = _Endpoint(
settings={
'response_type': (str,),
'auth': [
'basic_auth',
'cookie_auth',
'jwt_token'
],
'endpoint_path': '/repositories/{repository}/branches',
'operation_id': 'create_branch',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'repository',
'branch_creation',
],
'required': [
'repository',
'branch_creation',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'repository':
(str,),
'branch_creation':
(BranchCreation,),
},
'attribute_map': {
'repository': 'repository',
},
'location_map': {
'repository': 'path',
'branch_creation': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'text/html',
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__create_branch
)
def __delete_branch(
self,
repository,
branch,
**kwargs
):
"""delete branch # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_branch(repository, branch, async_req=True)
>>> result = thread.get()
Args:
repository (str):
branch (str):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['repository'] = \
repository
kwargs['branch'] = \
branch
return self.call_with_http_info(**kwargs)
self.delete_branch = _Endpoint(
settings={
'response_type': None,
'auth': [
'basic_auth',
'cookie_auth',
'jwt_token'
],
'endpoint_path': '/repositories/{repository}/branches/{branch}',
'operation_id': 'delete_branch',
'http_method': 'DELETE',
'servers': None,
},
params_map={
'all': [
'repository',
'branch',
],
'required': [
'repository',
'branch',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'repository':
(str,),
'branch':
(str,),
},
'attribute_map': {
'repository': 'repository',
'branch': 'branch',
},
'location_map': {
'repository': 'path',
'branch': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__delete_branch
)
def __diff_branch(
self,
repository,
branch,
**kwargs
):
"""diff branch # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.diff_branch(repository, branch, async_req=True)
>>> result = thread.get()
Args:
repository (str):
branch (str):
Keyword Args:
after (str): return items after this value. [optional]
amount (int): how many items to return. [optional] if omitted the server will use the default value of 100
prefix (str): return items prefixed with this value. [optional]
delimiter (str): delimiter used to group common prefixes by. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
DiffList
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['repository'] = \
repository
kwargs['branch'] = \
branch
return self.call_with_http_info(**kwargs)
self.diff_branch = _Endpoint(
settings={
'response_type': (DiffList,),
'auth': [
'basic_auth',
'cookie_auth',
'jwt_token'
],
'endpoint_path': '/repositories/{repository}/branches/{branch}/diff',
'operation_id': 'diff_branch',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'repository',
'branch',
'after',
'amount',
'prefix',
'delimiter',
],
'required': [
'repository',
'branch',
],
'nullable': [
],
'enum': [
],
'validation': [
'amount',
]
},
root_map={
'validations': {
('amount',): {
'inclusive_maximum': 1000,
'inclusive_minimum': -1,
},
},
'allowed_values': {
},
'openapi_types': {
'repository':
(str,),
'branch':
(str,),
'after':
(str,),
'amount':
(int,),
'prefix':
(str,),
'delimiter':
(str,),
},
'attribute_map': {
'repository': 'repository',
'branch': 'branch',
'after': 'after',
'amount': 'amount',
'prefix': 'prefix',
'delimiter': 'delimiter',
},
'location_map': {
'repository': 'path',
'branch': 'path',
'after': 'query',
'amount': 'query',
'prefix': 'query',
'delimiter': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__diff_branch
)
def __get_branch(
self,
repository,
branch,
**kwargs
):
"""get branch # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_branch(repository, branch, async_req=True)
>>> result = thread.get()
Args:
repository (str):
branch (str):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
Ref
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['repository'] = \
repository
kwargs['branch'] = \
branch
return self.call_with_http_info(**kwargs)
self.get_branch = _Endpoint(
settings={
'response_type': (Ref,),
'auth': [
'basic_auth',
'cookie_auth',
'jwt_token'
],
'endpoint_path': '/repositories/{repository}/branches/{branch}',
'operation_id': 'get_branch',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'repository',
'branch',
],
'required': [
'repository',
'branch',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'repository':
(str,),
'branch':
(str,),
},
'attribute_map': {
'repository': 'repository',
'branch': 'branch',
},
'location_map': {
'repository': 'path',
'branch': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__get_branch
)
def __list_branches(
self,
repository,
**kwargs
):
"""list branches # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_branches(repository, async_req=True)
>>> result = thread.get()
Args:
repository (str):
Keyword Args:
prefix (str): return items prefixed with this value. [optional]
after (str): return items after this value. [optional]
amount (int): how many items to return. [optional] if omitted the server will use the default value of 100
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
RefList
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['repository'] = \
repository
return self.call_with_http_info(**kwargs)
self.list_branches = _Endpoint(
settings={
'response_type': (RefList,),
'auth': [
'basic_auth',
'cookie_auth',
'jwt_token'
],
'endpoint_path': '/repositories/{repository}/branches',
'operation_id': 'list_branches',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'repository',
'prefix',
'after',
'amount',
],
'required': [
'repository',
],
'nullable': [
],
'enum': [
],
'validation': [
'amount',
]
},
root_map={
'validations': {
('amount',): {
'inclusive_maximum': 1000,
'inclusive_minimum': -1,
},
},
'allowed_values': {
},
'openapi_types': {
'repository':
(str,),
'prefix':
(str,),
'after':
(str,),
'amount':
(int,),
},
'attribute_map': {
'repository': 'repository',
'prefix': 'prefix',
'after': 'after',
'amount': 'amount',
},
'location_map': {
'repository': 'path',
'prefix': 'query',
'after': 'query',
'amount': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__list_branches
)
def __reset_branch(
self,
repository,
branch,
reset_creation,
**kwargs
):
"""reset branch # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.reset_branch(repository, branch, reset_creation, async_req=True)
>>> result = thread.get()
Args:
repository (str):
branch (str):
reset_creation (ResetCreation):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['repository'] = \
repository
kwargs['branch'] = \
branch
kwargs['reset_creation'] = \
reset_creation
return self.call_with_http_info(**kwargs)
self.reset_branch = _Endpoint(
settings={
'response_type': None,
'auth': [
'basic_auth',
'cookie_auth',
'jwt_token'
],
'endpoint_path': '/repositories/{repository}/branches/{branch}',
'operation_id': 'reset_branch',
'http_method': 'PUT',
'servers': None,
},
params_map={
'all': [
'repository',
'branch',
'reset_creation',
],
'required': [
'repository',
'branch',
'reset_creation',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'repository':
(str,),
'branch':
(str,),
'reset_creation':
(ResetCreation,),
},
'attribute_map': {
'repository': 'repository',
'branch': 'branch',
},
'location_map': {
'repository': 'path',
'branch': 'path',
'reset_creation': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__reset_branch
)
def __revert_branch(
self,
repository,
branch,
revert_creation,
**kwargs
):
"""revert # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.revert_branch(repository, branch, revert_creation, async_req=True)
>>> result = thread.get()
Args:
repository (str):
branch (str):
revert_creation (RevertCreation):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['repository'] = \
repository
kwargs['branch'] = \
branch
kwargs['revert_creation'] = \
revert_creation
return self.call_with_http_info(**kwargs)
self.revert_branch = _Endpoint(
settings={
'response_type': None,
'auth': [
'basic_auth',
'cookie_auth',
'jwt_token'
],
'endpoint_path': '/repositories/{repository}/branches/{branch}/revert',
'operation_id': 'revert_branch',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'repository',
'branch',
'revert_creation',
],
'required': [
'repository',
'branch',
'revert_creation',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'repository':
(str,),
'branch':
(str,),
'revert_creation':
(RevertCreation,),
},
'attribute_map': {
'repository': 'repository',
'branch': 'branch',
},
'location_map': {
'repository': 'path',
'branch': 'path',
'revert_creation': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__revert_branch
)
| 35.348974
| 122
| 0.436453
| 2,852
| 36,162
| 5.302595
| 0.073983
| 0.029756
| 0.024069
| 0.024995
| 0.862064
| 0.838392
| 0.834491
| 0.821662
| 0.815579
| 0.794419
| 0
| 0.003132
| 0.479066
| 36,162
| 1,022
| 123
| 35.383562
| 0.79966
| 0.295642
| 0
| 0.699438
| 1
| 0
| 0.22149
| 0.0335
| 0
| 0
| 0
| 0
| 0
| 1
| 0.011236
| false
| 0
| 0.015449
| 0
| 0.037921
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4db3f2f9b75b2e7d6ddca301f177e334d05db430
| 52,637
|
py
|
Python
|
app/productdb/tests/test_productdb_views.py
|
thomascrha/product-database
|
f561d05850898ce8e3466c79ba664bd576b1fa8c
|
[
"MIT"
] | null | null | null |
app/productdb/tests/test_productdb_views.py
|
thomascrha/product-database
|
f561d05850898ce8e3466c79ba664bd576b1fa8c
|
[
"MIT"
] | null | null | null |
app/productdb/tests/test_productdb_views.py
|
thomascrha/product-database
|
f561d05850898ce8e3466c79ba664bd576b1fa8c
|
[
"MIT"
] | null | null | null |
"""
Test suite for the productdb.views module
"""
import datetime
import pytest
from django.contrib.messages.storage.fallback import FallbackStorage
from django.contrib.auth.models import AnonymousUser, Permission
from django.core.exceptions import PermissionDenied
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.urlresolvers import reverse
from django.http import Http404
from django.test import RequestFactory
from mixer.backend.django import mixer
from app.productdb import views
from app.productdb.models import ProductList, Product, ProductMigrationOption, Vendor, ProductMigrationSource, \
ProductCheck
pytestmark = pytest.mark.django_db
def patch_contrib_messages(request):
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
return messages
@pytest.mark.usefixtures("import_default_vendors")
class TestHomeView:
URL_NAME = "productdb:home"
def test_anonymous_default(self):
url = reverse(self.URL_NAME)
request = RequestFactory().get(url)
request.user = AnonymousUser()
response = views.home(request)
assert response.status_code == 200, "Should be callable"
@pytest.mark.usefixtures("enable_login_only_mode")
def test_anonymous_login_only_mode(self):
url = reverse(self.URL_NAME)
request = RequestFactory().get(url)
request.user = AnonymousUser()
response = views.home(request)
assert response.status_code == 302, "Should redirect to login page"
assert response.url == reverse("login") + "?next=" + url, \
"Should contain a next parameter for redirect"
def test_authenticated_user(self):
url = reverse(self.URL_NAME)
request = RequestFactory().get(url)
request.user = mixer.blend("auth.User", is_superuser=False, is_staff=False)
response = views.home(request)
assert response.status_code == 200, "Should be callable"
@pytest.mark.usefixtures("import_default_vendors")
class TestAboutView:
URL_NAME = "productdb:about"
def test_anonymous_default(self):
url = reverse(self.URL_NAME)
request = RequestFactory().get(url)
request.user = AnonymousUser()
response = views.about_view(request)
assert response.status_code == 200, "Should be callable"
@pytest.mark.usefixtures("enable_login_only_mode")
def test_anonymous_login_only_mode(self):
url = reverse(self.URL_NAME)
request = RequestFactory().get(url)
request.user = AnonymousUser()
response = views.about_view(request)
assert response.status_code == 302, "Should redirect to login page"
assert response.url == reverse("login") + "?next=" + url, \
"Should contain a next parameter for redirect"
def test_authenticated_user(self):
url = reverse(self.URL_NAME)
request = RequestFactory().get(url)
request.user = mixer.blend("auth.User", is_superuser=False, is_staff=False)
response = views.about_view(request)
assert response.status_code == 200, "Should be callable"
@pytest.mark.usefixtures("import_default_vendors")
class TestBrowseVendorProductsView:
URL_NAME = "productdb:browse_vendor_products"
def test_anonymous_default(self):
url = reverse(self.URL_NAME)
request = RequestFactory().get(url)
request.user = AnonymousUser()
response = views.browse_vendor_products(request)
assert response.status_code == 200, "Should be callable"
@pytest.mark.usefixtures("enable_login_only_mode")
def test_anonymous_login_only_mode(self):
url = reverse(self.URL_NAME)
request = RequestFactory().get(url)
request.user = AnonymousUser()
response = views.browse_vendor_products(request)
assert response.status_code == 302, "Should redirect to login page"
assert response.url == reverse("login") + "?next=" + url, \
"Should contain a next parameter for redirect"
def test_authenticated_user(self):
url = reverse(self.URL_NAME)
request = RequestFactory().get(url)
request.user = mixer.blend("auth.User", is_superuser=False, is_staff=False)
response = views.browse_vendor_products(request)
assert response.status_code == 200, "Should be callable"
def test_select_vendor_default(self):
url = reverse(self.URL_NAME)
# a predefined value should be selected by default
default_vendor = '<option value="1" selected>Cisco Systems</option>'
request = RequestFactory().get(url)
request.user = AnonymousUser()
response = views.browse_vendor_products(request)
assert response.status_code == 200, "Should be callable"
assert default_vendor in response.content.decode()
def test_select_vendor_by_user(self):
url = reverse(self.URL_NAME)
selected_vendor = '<option value="2" selected>Juniper Networks</option>'
data = {"vendor_selection": 2}
request = RequestFactory().post(url, data=data)
request.user = AnonymousUser()
response = views.browse_vendor_products(request)
assert response.status_code == 200, "Should be callable"
assert selected_vendor in response.content.decode()
# call with invalid ID
default_vendor = '<option value="1" selected>Cisco Systems</option>'
data = {"vendor_selection": 999}
request = RequestFactory().post(url, data=data)
request.user = AnonymousUser()
response = views.browse_vendor_products(request)
assert response.status_code == 200, "Should be callable"
assert selected_vendor not in response.content.decode()
assert default_vendor in response.content.decode()
@pytest.mark.usefixtures("import_default_vendors")
class TestBrowseAllProductsView:
    """Access tests for the 'browse all products' view."""
    URL_NAME = "productdb:all_products"

    def test_anonymous_default(self):
        """Anonymous access is allowed by default."""
        target = reverse(self.URL_NAME)
        req = RequestFactory().get(target)
        req.user = AnonymousUser()
        resp = views.browse_all_products(req)
        assert resp.status_code == 200, "Should be callable"

    @pytest.mark.usefixtures("enable_login_only_mode")
    def test_anonymous_login_only_mode(self):
        """In login-only mode anonymous users are redirected to the login page."""
        target = reverse(self.URL_NAME)
        req = RequestFactory().get(target)
        req.user = AnonymousUser()
        resp = views.browse_all_products(req)
        assert resp.status_code == 302, "Should redirect to login page"
        assert resp.url == reverse("login") + "?next=" + target, \
            "Should contain a next parameter for redirect"

    def test_authenticated_user(self):
        """A regular authenticated user can open the page."""
        target = reverse(self.URL_NAME)
        req = RequestFactory().get(target)
        req.user = mixer.blend("auth.User", is_superuser=False, is_staff=False)
        resp = views.browse_all_products(req)
        assert resp.status_code == 200, "Should be callable"
@pytest.mark.usefixtures("import_default_vendors")
class TestListProductGroupsView:
    """Access tests for the product group list view."""
    URL_NAME = "productdb:list-product_groups"

    def test_anonymous_default(self):
        """Anonymous access is allowed by default."""
        target = reverse(self.URL_NAME)
        req = RequestFactory().get(target)
        req.user = AnonymousUser()
        resp = views.list_product_groups(req)
        assert resp.status_code == 200, "Should be callable"

    @pytest.mark.usefixtures("enable_login_only_mode")
    def test_anonymous_login_only_mode(self):
        """In login-only mode anonymous users are redirected to the login page."""
        target = reverse(self.URL_NAME)
        req = RequestFactory().get(target)
        req.user = AnonymousUser()
        resp = views.list_product_groups(req)
        assert resp.status_code == 302, "Should redirect to login page"
        assert resp.url == reverse("login") + "?next=" + target, \
            "Should contain a next parameter for redirect"

    def test_authenticated_user(self):
        """A regular authenticated user can open the page."""
        target = reverse(self.URL_NAME)
        req = RequestFactory().get(target)
        req.user = mixer.blend("auth.User", is_superuser=False, is_staff=False)
        resp = views.list_product_groups(req)
        assert resp.status_code == 200, "Should be callable"
@pytest.mark.usefixtures("import_default_vendors")
class TestListProductListsView:
    """Access tests for the product list overview view."""
    URL_NAME = "productdb:list-product_lists"

    def test_anonymous_default(self):
        """Anonymous access is allowed by default."""
        target = reverse(self.URL_NAME)
        req = RequestFactory().get(target)
        req.user = AnonymousUser()
        resp = views.list_product_lists(req)
        assert resp.status_code == 200, "Should be callable"

    @pytest.mark.usefixtures("enable_login_only_mode")
    def test_anonymous_login_only_mode(self):
        """In login-only mode anonymous users are redirected to the login page."""
        target = reverse(self.URL_NAME)
        req = RequestFactory().get(target)
        req.user = AnonymousUser()
        resp = views.list_product_lists(req)
        assert resp.status_code == 302, "Should redirect to login page"
        assert resp.url == reverse("login") + "?next=" + target, \
            "Should contain a next parameter for redirect"

    def test_authenticated_user(self):
        """A regular authenticated user can open the page."""
        target = reverse(self.URL_NAME)
        req = RequestFactory().get(target)
        req.user = mixer.blend("auth.User", is_superuser=False, is_staff=False)
        resp = views.list_product_lists(req)
        assert resp.status_code == 200, "Should be callable"
@pytest.mark.usefixtures("import_default_vendors")
class TestDetailProductGroupView:
    """Tests for the product group detail view."""
    URL_NAME = "productdb:detail-product_group"

    def test_without_parameter(self):
        """Calling the view without an ID redirects to the group list."""
        target = reverse(self.URL_NAME)
        req = RequestFactory().get(target)
        req.user = AnonymousUser()
        resp = views.detail_product_group(req)
        assert resp.status_code == 302, "Should redirect to list-product_groups view"
        assert resp.url == reverse("productdb:list-product_groups")

    def test_url_format(self):
        # the parameter-less detail URL must be a prefix of the full detail
        # URL (required for the Datatable rendering)
        group = mixer.blend("productdb.ProductGroup")
        base_url = reverse(self.URL_NAME)
        detail_url = reverse(self.URL_NAME, kwargs={"product_group_id": group.id})
        assert detail_url.startswith(base_url), "detail URL without a parameter must result in a sub-URL"

    def test_404(self):
        """An unknown product group ID raises Http404."""
        target = reverse(self.URL_NAME, kwargs={"product_group_id": 9999})
        req = RequestFactory().get(target)
        req.user = AnonymousUser()
        with pytest.raises(Http404):
            views.detail_product_group(req, 9999)

    def test_anonymous_default(self):
        """Anonymous access is allowed by default."""
        group = mixer.blend("productdb.ProductGroup")
        target = reverse(self.URL_NAME, kwargs={"product_group_id": group.id})
        req = RequestFactory().get(target)
        req.user = AnonymousUser()
        resp = views.detail_product_group(req, group.id)
        assert resp.status_code == 200, "Should be callable"

    @pytest.mark.usefixtures("enable_login_only_mode")
    def test_anonymous_login_only_mode(self):
        """In login-only mode anonymous users are redirected to the login page."""
        group = mixer.blend("productdb.ProductGroup")
        target = reverse(self.URL_NAME, kwargs={"product_group_id": group.id})
        req = RequestFactory().get(target)
        req.user = AnonymousUser()
        resp = views.detail_product_group(req, group.id)
        assert resp.status_code == 302, "Should redirect to login page"
        assert resp.url == reverse("login") + "?next=" + target, \
            "Should contain a next parameter for redirect"

    def test_authenticated_user(self):
        """A regular authenticated user can open the detail page."""
        group = mixer.blend("productdb.ProductGroup")
        target = reverse(self.URL_NAME, kwargs={"product_group_id": group.id})
        req = RequestFactory().get(target)
        req.user = mixer.blend("auth.User", is_superuser=False, is_staff=False)
        resp = views.detail_product_group(req, group.id)
        assert resp.status_code == 200, "Should be callable"
class TestShareProductListView:
    """Tests for the public product list share view (no login required)."""
    URL_NAME = "productdb:share-product_list"

    def test_404(self):
        """An unknown product list ID raises Http404."""
        target = reverse(self.URL_NAME, kwargs={"product_list_id": 9999})
        req = RequestFactory().get(target)
        req.user = AnonymousUser()
        with pytest.raises(Http404):
            views.share_product_list(req, 9999)

    @pytest.mark.usefixtures("import_default_vendors")
    def test_anonymous_default(self):
        """Anonymous access to the share link is allowed."""
        product = mixer.blend("productdb.Product")
        plist = mixer.blend("productdb.ProductList", string_product_list=product.product_id)
        target = reverse(self.URL_NAME, kwargs={"product_list_id": plist.id})
        req = RequestFactory().get(target)
        req.user = AnonymousUser()
        resp = views.share_product_list(req, plist.id)
        assert resp.status_code == 200, "Should be callable"

    @pytest.mark.usefixtures("enable_login_only_mode")
    @pytest.mark.usefixtures("import_default_vendors")
    def test_anonymous_login_only_mode(self):
        """The share link stays reachable even in login-only mode."""
        product = mixer.blend("productdb.Product")
        plist = mixer.blend("productdb.ProductList", string_product_list=product.product_id)
        target = reverse(self.URL_NAME, kwargs={"product_list_id": plist.id})
        req = RequestFactory().get(target)
        req.user = AnonymousUser()
        resp = views.share_product_list(req, plist.id)
        assert resp.status_code == 200, "Share link is also callable in login only mode"

    @pytest.mark.usefixtures("import_default_vendors")
    def test_authenticated_user(self):
        """A regular authenticated user can open the share link."""
        product = mixer.blend("productdb.Product")
        plist = mixer.blend("productdb.ProductList", string_product_list=product.product_id)
        target = reverse(self.URL_NAME, kwargs={"product_list_id": plist.id})
        req = RequestFactory().get(target)
        req.user = mixer.blend("auth.User", is_superuser=False, is_staff=False)
        resp = views.share_product_list(req, plist.id)
        assert resp.status_code == 200, "Should be callable"
@pytest.mark.usefixtures("import_default_vendors")
class TestDetailProductListView:
    """Tests for the product list detail view."""
    URL_NAME = "productdb:detail-product_list"

    def test_without_parameter(self):
        """Calling the view without an ID redirects to the product list overview."""
        target = reverse(self.URL_NAME)
        req = RequestFactory().get(target)
        req.user = AnonymousUser()
        resp = views.detail_product_list(req)
        assert resp.status_code == 302, "Should redirect to list-product_list view"
        assert resp.url == reverse("productdb:list-product_lists")

    def test_url_format(self):
        # the parameter-less detail URL must be a prefix of the full detail
        # URL (required for the Datatable rendering)
        product = mixer.blend("productdb.Product")
        plist = mixer.blend("productdb.ProductList", string_product_list=product.product_id)
        base_url = reverse(self.URL_NAME)
        detail_url = reverse(self.URL_NAME, kwargs={"product_list_id": plist.id})
        assert detail_url.startswith(base_url), "detail URL without a parameter must result in a sub-URL"

    def test_404(self):
        """An unknown product list ID raises Http404."""
        target = reverse(self.URL_NAME, kwargs={"product_list_id": 9999})
        req = RequestFactory().get(target)
        req.user = AnonymousUser()
        with pytest.raises(Http404):
            views.detail_product_list(req, 9999)

    def test_anonymous_default(self):
        """Anonymous access is allowed by default."""
        product = mixer.blend("productdb.Product")
        plist = mixer.blend("productdb.ProductList", string_product_list=product.product_id)
        target = reverse(self.URL_NAME, kwargs={"product_list_id": plist.id})
        req = RequestFactory().get(target)
        req.user = AnonymousUser()
        resp = views.detail_product_list(req, plist.id)
        assert resp.status_code == 200, "Should be callable"

    @pytest.mark.usefixtures("enable_login_only_mode")
    def test_anonymous_login_only_mode(self):
        """In login-only mode anonymous users are sent to the share link instead."""
        product = mixer.blend("productdb.Product")
        plist = mixer.blend("productdb.ProductList", string_product_list=product.product_id)
        target = reverse(self.URL_NAME, kwargs={"product_list_id": plist.id})
        req = RequestFactory().get(target)
        req.user = AnonymousUser()
        resp = views.detail_product_list(req, plist.id)
        assert resp.status_code == 302, "Should redirect to login page"
        assert resp.url.startswith("/productdb/share/productlist/"), \
            "if the user is not logged in, it is redirected to the sharelink"

    def test_authenticated_user(self):
        """A regular authenticated user can open the detail page."""
        product = mixer.blend("productdb.Product")
        plist = mixer.blend("productdb.ProductList", string_product_list=product.product_id)
        target = reverse(self.URL_NAME, kwargs={"product_list_id": plist.id})
        req = RequestFactory().get(target)
        req.user = mixer.blend("auth.User", is_superuser=False, is_staff=False)
        resp = views.detail_product_list(req, plist.id)
        assert resp.status_code == 200, "Should be callable"
class TestProductDetailsView:
    """Tests for the product detail view."""
    URL_NAME = "productdb:product-detail"

    @pytest.mark.usefixtures("import_default_vendors")
    def test_without_parameter(self):
        """Calling the view without an ID redirects to the all-products page."""
        target = reverse("productdb:product-list")
        req = RequestFactory().get(target)
        req.user = AnonymousUser()
        resp = views.view_product_details(req)
        assert resp.status_code == 302, "Should redirect to list-product_list view"
        assert resp.url == reverse("productdb:all_products")

    @pytest.mark.usefixtures("import_default_vendors")
    def test_url_format(self):
        # the parameter-less detail URL must be a prefix of the full detail
        # URL (required for the Datatable rendering)
        product = mixer.blend("productdb.Product")
        base_url = reverse("productdb:product-list")
        detail_url = reverse(self.URL_NAME, kwargs={"product_id": product.id})
        assert detail_url.startswith(base_url), "detail URL without a parameter must result in a sub-URL"

    def test_404(self):
        """An unknown product ID raises Http404."""
        target = reverse(self.URL_NAME, kwargs={"product_id": 9999})
        req = RequestFactory().get(target)
        req.user = AnonymousUser()
        with pytest.raises(Http404):
            views.view_product_details(req, 9999)

    @pytest.mark.usefixtures("import_default_vendors")
    def test_anonymous_default(self):
        """Anonymous access is allowed by default."""
        product = mixer.blend("productdb.Product")
        target = reverse(self.URL_NAME, kwargs={"product_id": product.id})
        req = RequestFactory().get(target)
        req.user = AnonymousUser()
        resp = views.view_product_details(req, product.id)
        assert resp.status_code == 200, "Should be callable"

    @pytest.mark.usefixtures("enable_login_only_mode")
    @pytest.mark.usefixtures("import_default_vendors")
    def test_anonymous_login_only_mode(self):
        """In login-only mode anonymous users are redirected to the login page."""
        product = mixer.blend("productdb.Product")
        target = reverse(self.URL_NAME, kwargs={"product_id": product.id})
        req = RequestFactory().get(target)
        req.user = AnonymousUser()
        resp = views.view_product_details(req, product.id)
        assert resp.status_code == 302, "Should redirect to login page"
        assert resp.url == reverse("login") + "?next=" + target, \
            "Should contain a next parameter for redirect"

    @pytest.mark.usefixtures("import_default_vendors")
    def test_authenticated_user(self):
        """A regular authenticated user can open the detail page."""
        product = mixer.blend("productdb.Product")
        target = reverse(self.URL_NAME, kwargs={"product_id": product.id})
        req = RequestFactory().get(target)
        req.user = mixer.blend("auth.User", is_superuser=False, is_staff=False)
        resp = views.view_product_details(req, product.id)
        assert resp.status_code == 200, "Should be callable"

    @pytest.mark.usefixtures("import_default_vendors")
    def test_detail_view_with_migration_options(self):
        """The detail page renders for every product in a small migration chain."""
        # build a base product with two migration sources of different preference
        source_low = ProductMigrationSource.objects.create(name="Group One")
        source_high = ProductMigrationSource.objects.create(name="Group Two", preference=100)
        vendor = Vendor.objects.get(id=1)
        base_product = mixer.blend("productdb.Product", product_id="C2960XS", vendor=vendor)
        repl_a = mixer.blend("productdb.Product", product_id="C2960XL", vendor=vendor)
        repl_b = mixer.blend("productdb.Product", product_id="C2960XT", vendor=vendor)
        repl_c = mixer.blend("productdb.Product", product_id="C2960XR", vendor=vendor)
        ProductMigrationOption.objects.create(
            product=base_product, migration_source=source_low,
            replacement_product_id=repl_a.product_id
        )
        ProductMigrationOption.objects.create(
            product=base_product, migration_source=source_high,
            replacement_product_id=repl_b.product_id
        )
        # the second source's replacement is itself replaced again (chain)
        ProductMigrationOption.objects.create(
            product=repl_b, migration_source=source_high,
            replacement_product_id=repl_c.product_id
        )
        for pid in [base_product.id, repl_a.id, repl_b.id, repl_c.id]:
            target = reverse(self.URL_NAME, kwargs={"product_id": pid})
            req = RequestFactory().get(target)
            req.user = mixer.blend("auth.User", is_superuser=False, is_staff=False)
            resp = views.view_product_details(req, pid)
            assert resp.status_code == 200, "Should be callable"
@pytest.mark.usefixtures("import_default_vendors")
class TestAddProductListView:
    """Tests for the add product list view (requires add_productlist permission)."""
    URL_NAME = "productdb:add-product_list"

    def test_anonymous_default(self):
        """Anonymous users are redirected to the login page."""
        target = reverse(self.URL_NAME)
        req = RequestFactory().get(target)
        req.user = AnonymousUser()
        resp = views.add_product_list(req)
        assert resp.status_code == 302, "Should redirect to login page"
        assert resp.url == reverse("login") + "?next=" + target, \
            "Should contain a next parameter for redirect"

    @pytest.mark.usefixtures("enable_login_only_mode")
    def test_anonymous_login_only_mode(self):
        """In login-only mode anonymous users are also redirected to the login page."""
        target = reverse(self.URL_NAME)
        req = RequestFactory().get(target)
        req.user = AnonymousUser()
        resp = views.add_product_list(req)
        assert resp.status_code == 302, "Should redirect to login page"
        assert resp.url == reverse("login") + "?next=" + target, \
            "Should contain a next parameter for redirect"

    def test_authenticated_user(self):
        """Without the permission access is denied; with it the view renders."""
        target = reverse(self.URL_NAME)
        plain_user = mixer.blend("auth.User", is_superuser=False, is_staff=False)
        req = RequestFactory().get(target)
        req.user = plain_user
        with pytest.raises(PermissionDenied):
            views.add_product_list(req)
        # a fresh user that holds the add permission may open the view
        permission = Permission.objects.get(codename="add_productlist")
        privileged_user = mixer.blend("auth.User", is_superuser=False, is_staff=False)
        privileged_user.user_permissions.add(permission)
        privileged_user.save()
        req = RequestFactory().get(target)
        req.user = privileged_user
        resp = views.add_product_list(req)
        assert resp.status_code == 200, "Should be callable"

    @pytest.mark.usefixtures("import_default_vendors")
    def test_post(self):
        """A valid POST creates the product list and redirects to the overview."""
        target = reverse(self.URL_NAME)
        permission = Permission.objects.get(codename="add_productlist")
        product = mixer.blend("productdb.Product")
        account = mixer.blend("auth.User", is_superuser=False, is_staff=False)
        account.user_permissions.add(permission)
        account.save()
        payload = {
            "name": "My Product List",
            "description": "My description",
            "string_product_list": product.product_id
        }
        req = RequestFactory().post(target, data=payload, follow=True)
        req.user = account
        resp = views.add_product_list(req)
        assert resp.status_code == 302
        assert resp.url == reverse("productdb:list-product_lists")
        assert ProductList.objects.count() == 1, "One element should be created in the database"
@pytest.mark.usefixtures("import_default_vendors")
class TestEditProductListView:
    """Tests for the edit product list view.

    Editing requires the change_productlist permission and is further
    restricted to the author stored on the Product List.
    """
    URL_NAME = "productdb:edit-product_list"

    def test_404(self):
        """An unknown product list ID raises Http404."""
        target = reverse(self.URL_NAME, kwargs={"product_list_id": 9999})
        req = RequestFactory().get(target)
        permission = Permission.objects.get(codename="change_productlist")
        account = mixer.blend("auth.User", is_superuser=False, is_staff=False)
        req.user = account
        account.user_permissions.add(permission)
        account.save()
        with pytest.raises(Http404):
            views.edit_product_list(req, 9999)

    def test_anonymous_default(self):
        """Anonymous users are redirected to the login page."""
        mixer.blend("productdb.Product")
        mixer.blend("productdb.Product")
        mixer.blend("productdb.Product")
        plist = mixer.blend(
            "productdb.ProductList",
            string_product_list=";".join(Product.objects.all().values_list("product_id", flat=True))
        )
        target = reverse(self.URL_NAME, kwargs={"product_list_id": plist.id})
        req = RequestFactory().get(target)
        req.user = AnonymousUser()
        resp = views.edit_product_list(req, plist.id)
        assert resp.status_code == 302, "Should redirect to login page"
        assert resp.url == reverse("login") + "?next=" + target, \
            "Should contain a next parameter for redirect"

    @pytest.mark.usefixtures("enable_login_only_mode")
    def test_anonymous_login_only_mode(self):
        """In login-only mode anonymous users are also redirected to the login page."""
        mixer.blend("productdb.Product")
        mixer.blend("productdb.Product")
        mixer.blend("productdb.Product")
        plist = mixer.blend(
            "productdb.ProductList",
            string_product_list=";".join(Product.objects.all().values_list("product_id", flat=True))
        )
        target = reverse(self.URL_NAME, kwargs={"product_list_id": plist.id})
        req = RequestFactory().get(target)
        req.user = AnonymousUser()
        resp = views.edit_product_list(req, plist.id)
        assert resp.status_code == 302, "Should redirect to login page"
        assert resp.url == reverse("login") + "?next=" + target, \
            "Should contain a next parameter for redirect"

    def test_authenticated_user(self):
        """Permission is required; even then only the author may edit."""
        mixer.blend("productdb.Product")
        mixer.blend("productdb.Product")
        mixer.blend("productdb.Product")
        plist = mixer.blend(
            "productdb.ProductList",
            string_product_list=";".join(Product.objects.all().values_list("product_id", flat=True))
        )
        target = reverse(self.URL_NAME, kwargs={"product_list_id": plist.id})
        plain_user = mixer.blend("auth.User", is_superuser=False, is_staff=False)
        req = RequestFactory().get(target)
        req.user = plain_user
        patch_contrib_messages(req)
        # without the change permission the view must deny access
        with pytest.raises(PermissionDenied):
            views.edit_product_list(req, plist.id)
        # a user with the permission (but not the author) gets a warning page
        permission = Permission.objects.get(codename="change_productlist")
        other_user = mixer.blend("auth.User", is_superuser=False, is_staff=False)
        other_user.user_permissions.add(permission)
        other_user.save()
        req = RequestFactory().get(target)
        req.user = other_user
        msgs = patch_contrib_messages(req)
        resp = views.edit_product_list(req, plist.id)
        assert resp.status_code == 200, "Should be callable"
        assert msgs.added_new is True
        expected_error = (
            "You are not allowed to change this Product List. Only the original Author is allowed to "
            "perform this action."
        )
        assert expected_error in resp.content.decode()

    def test_post(self):
        """Only the original author may submit changes; others see an error."""
        product = mixer.blend("productdb.Product")
        mixer.blend("productdb.Product")
        mixer.blend("productdb.Product")
        plist = mixer.blend(
            "productdb.ProductList",
            string_product_list=";".join(Product.objects.all().values_list("product_id", flat=True))
        )
        target = reverse(self.URL_NAME, kwargs={"product_list_id": plist.id})
        permission = Permission.objects.get(codename="change_productlist")
        other_user = mixer.blend("auth.User", is_superuser=False, is_staff=False)
        other_user.user_permissions.add(permission)
        other_user.save()
        # another user tries to change the product list, but only the
        # original creator is allowed to change it
        payload = {
            "name": "My Product List",
            "description": "My description",
            "string_product_list": product.product_id
        }
        req = RequestFactory().post(target, data=payload, follow=True)
        req.user = other_user
        patch_contrib_messages(req)
        resp = views.edit_product_list(req, plist.id)
        assert resp.status_code == 200
        expected_error = (
            "You are not allowed to change this Product List. Only the original Author is allowed to "
            "perform this action."
        )
        assert expected_error in resp.content.decode()
        # the original author submits the same change successfully
        payload = {
            "name": "My Product List",
            "description": "My description",
            "string_product_list": product.product_id
        }
        req = RequestFactory().post(target, data=payload, follow=True)
        plist.update_user.user_permissions.add(permission)
        req.user = plist.update_user
        patch_contrib_messages(req)
        resp = views.edit_product_list(req, plist.id)
        assert resp.status_code == 302
        assert resp.url == reverse("productdb:list-product_lists")
        assert ProductList.objects.count() == 1, "One element should be created in the database"
@pytest.mark.usefixtures("import_default_vendors")
class TestDeleteProductListView:
    """Tests for the delete product list view.

    Deleting requires the delete_productlist permission and is further
    restricted to the user stored on the Product List (update_user).
    """
    URL_NAME = "productdb:delete-product_list"

    def test_404(self):
        """An unknown product list ID must raise Http404."""
        url = reverse(self.URL_NAME, kwargs={"product_list_id": 9999})
        request = RequestFactory().get(url)
        perm = Permission.objects.get(codename="delete_productlist")
        user = mixer.blend("auth.User", is_superuser=False, is_staff=False)
        request.user = user
        user.user_permissions.add(perm)
        user.save()
        with pytest.raises(Http404):
            views.delete_product_list(request, 9999)

    def test_anonymous_default(self):
        """Anonymous users are redirected to the login page."""
        mixer.blend("productdb.Product")
        mixer.blend("productdb.Product")
        mixer.blend("productdb.Product")
        pl = mixer.blend(
            "productdb.ProductList",
            string_product_list=";".join(Product.objects.all().values_list("product_id", flat=True))
        )
        url = reverse(self.URL_NAME, kwargs={"product_list_id": pl.id})
        request = RequestFactory().get(url)
        request.user = AnonymousUser()
        response = views.delete_product_list(request, pl.id)
        assert response.status_code == 302, "Should redirect to login page"
        assert response.url == reverse("login") + "?next=" + url, \
            "Should contain a next parameter for redirect"

    @pytest.mark.usefixtures("enable_login_only_mode")
    def test_anonymous_login_only_mode(self):
        """In login-only mode anonymous users are also redirected to the login page."""
        mixer.blend("productdb.Product")
        mixer.blend("productdb.Product")
        mixer.blend("productdb.Product")
        pl = mixer.blend(
            "productdb.ProductList",
            string_product_list=";".join(Product.objects.all().values_list("product_id", flat=True))
        )
        url = reverse(self.URL_NAME, kwargs={"product_list_id": pl.id})
        request = RequestFactory().get(url)
        request.user = AnonymousUser()
        response = views.delete_product_list(request, pl.id)
        assert response.status_code == 302, "Should redirect to login page"
        assert response.url == reverse("login") + "?next=" + url, \
            "Should contain a next parameter for redirect"

    def test_authenticated_user(self):
        """Permission is required; even then only the author sees the confirm page."""
        mixer.blend("productdb.Product")
        mixer.blend("productdb.Product")
        mixer.blend("productdb.Product")
        pl = mixer.blend(
            "productdb.ProductList",
            string_product_list=";".join(Product.objects.all().values_list("product_id", flat=True))
        )
        url = reverse(self.URL_NAME, kwargs={"product_list_id": pl.id})
        user = mixer.blend("auth.User", is_superuser=False, is_staff=False)
        request = RequestFactory().get(url)
        request.user = user
        patch_contrib_messages(request)
        # without the delete permission the view must deny access
        with pytest.raises(PermissionDenied):
            views.delete_product_list(request, pl.id)
        # a user with the permission (but not the author) gets a warning page
        perm = Permission.objects.get(codename="delete_productlist")
        user = mixer.blend("auth.User", is_superuser=False, is_staff=False)
        user.user_permissions.add(perm)
        user.save()
        request = RequestFactory().get(url)
        request.user = user
        msgs = patch_contrib_messages(request)
        response = views.delete_product_list(request, pl.id)
        assert response.status_code == 200, "Should be callable"
        assert msgs.added_new is True, "Message should be added"
        expected_message = "You are not allowed to change this Product List. Only the " \
                           "original Author is allowed to perform this action."
        assert expected_message in response.content.decode()
        # get delete view with correct user
        perm = Permission.objects.get(codename="delete_productlist")
        pl.update_user.user_permissions.add(perm)
        pl.update_user.save()
        request = RequestFactory().get(url)
        request.user = pl.update_user
        # BUGFIX: capture the messages mock of *this* request; previously the
        # return value was dropped and the assertion below re-checked the
        # stale mock of the prior request
        msgs = patch_contrib_messages(request)
        response = views.delete_product_list(request, pl.id)
        assert response.status_code == 200, "Should be callable"
        assert msgs.added_new is True, "Message should be added"
        expected_message = "Be careful, this action cannot be undone!"
        assert expected_message in response.content.decode()

    def test_post(self):
        """Only the original author may confirm deletion; others see an error."""
        mixer.blend("productdb.Product")
        mixer.blend("productdb.Product")
        mixer.blend("productdb.Product")
        pl = mixer.blend(
            "productdb.ProductList",
            string_product_list=";".join(Product.objects.all().values_list("product_id", flat=True))
        )
        url = reverse(self.URL_NAME, kwargs={"product_list_id": pl.id})
        perm = Permission.objects.get(codename="delete_productlist")
        user = mixer.blend("auth.User", is_superuser=False, is_staff=False)
        user.user_permissions.add(perm)
        user.save()
        # another user tries to delete the product list, but only the
        # original creator is allowed to do so
        data = {
            "really_delete": True
        }
        request = RequestFactory().post(url, data=data, follow=True)
        request.user = user
        msgs = patch_contrib_messages(request)
        response = views.delete_product_list(request, pl.id)
        assert response.status_code == 200
        assert msgs.added_new is True
        expected_error = "You are not allowed to change this Product List. Only the original Author is allowed to " \
                         "perform this action."
        assert expected_error in response.content.decode()
        # the original author confirms the deletion
        data = {
            "really_delete": True
        }
        request = RequestFactory().post(url, data=data, follow=True)
        pl.update_user.user_permissions.add(perm)
        request.user = pl.update_user
        msgs = patch_contrib_messages(request)
        response = views.delete_product_list(request, pl.id)
        assert response.status_code == 302
        assert response.url == reverse("productdb:list-product_lists")
        assert msgs.added_new is True
        # BUGFIX: the message previously claimed an element "should be created",
        # but this assertion verifies the list was deleted
        assert ProductList.objects.count() == 0, "The product list should be deleted from the database"
@pytest.mark.usefixtures("import_default_vendors")
class TestImportProductMigrationsView:
    """Tests for the import product migrations view."""
    URL_NAME = "productdb:import_product_migrations"

    def test_anonymous_default(self):
        """Anonymous users are redirected to the login page."""
        target = reverse(self.URL_NAME)
        req = RequestFactory().get(target)
        req.user = AnonymousUser()
        resp = views.import_product_migrations(req)
        assert resp.status_code == 302, "Should redirect to login page"
        assert resp.url == reverse("login") + "?next=" + target, \
            "Should contain a next parameter for redirect"

    @pytest.mark.usefixtures("enable_login_only_mode")
    def test_anonymous_login_only_mode(self):
        """In login-only mode anonymous users are also redirected to the login page."""
        target = reverse(self.URL_NAME)
        req = RequestFactory().get(target)
        req.user = AnonymousUser()
        resp = views.import_product_migrations(req)
        assert resp.status_code == 302, "Should redirect to login page"
        assert resp.url == reverse("login") + "?next=" + target, \
            "Should contain a next parameter for redirect"

    def test_authenticated_user(self):
        """The dialog requires the change_productmigrationoption permission."""
        target = reverse(self.URL_NAME)
        req = RequestFactory().get(target)
        req.user = mixer.blend("auth.User", is_superuser=False, is_staff=False)
        with pytest.raises(PermissionDenied):
            views.import_product_migrations(req)
        req = RequestFactory().get(target)
        account = mixer.blend("auth.User", is_superuser=False, is_staff=False)
        account.user_permissions.add(Permission.objects.get(codename="change_productmigrationoption"))
        req.user = account
        resp = views.import_product_migrations(req)
        assert resp.status_code == 200, "Should be callable"

    @pytest.mark.usefixtures("disable_import_product_migrations_task")
    def test_post(self):
        """An uploaded file schedules the import task and redirects to its progress page."""
        target = reverse(self.URL_NAME)
        account = mixer.blend("auth.User", username="test", is_superuser=False, is_staff=False)
        account.user_permissions.add(Permission.objects.get(codename="change_productmigrationoption"))
        account.save()
        # the file content is not relevant for this test
        req = RequestFactory().post(target, data={"excel_file": SimpleUploadedFile("myfile.xlsx", b"yxz")})
        req.user = account
        resp = views.import_product_migrations(req)
        assert resp.status_code == 302, "redirect to task in progress view"
        assert resp.url == reverse("task_in_progress", kwargs={"task_id": "mock_task_id"})

    @pytest.mark.usefixtures("disable_import_product_migrations_task")
    def test_post_as_superuser(self):
        """Superusers can also trigger the import task."""
        target = reverse(self.URL_NAME)
        account = mixer.blend("auth.User", username="test", is_superuser=True, is_staff=False)
        account.user_permissions.add(Permission.objects.get(codename="change_productmigrationoption"))
        account.save()
        # the file content is not relevant for this test
        req = RequestFactory().post(target, data={"excel_file": SimpleUploadedFile("myfile.xlsx", b"yxz")})
        req.user = account
        resp = views.import_product_migrations(req)
        assert resp.status_code == 302, "redirect to task in progress view"
        assert resp.url == reverse("task_in_progress", kwargs={"task_id": "mock_task_id"})
@pytest.mark.usefixtures("import_default_vendors")
class TestImportProductsView:
    """Tests for the import products view."""
    URL_NAME = "productdb:import_products"

    def test_anonymous_default(self):
        """Anonymous users are redirected to the login page."""
        target = reverse(self.URL_NAME)
        req = RequestFactory().get(target)
        req.user = AnonymousUser()
        resp = views.import_products(req)
        assert resp.status_code == 302, "Should redirect to login page"
        assert resp.url == reverse("login") + "?next=" + target, \
            "Should contain a next parameter for redirect"

    @pytest.mark.usefixtures("enable_login_only_mode")
    def test_anonymous_login_only_mode(self):
        """In login-only mode anonymous users are also redirected to the login page."""
        target = reverse(self.URL_NAME)
        req = RequestFactory().get(target)
        req.user = AnonymousUser()
        resp = views.import_products(req)
        assert resp.status_code == 302, "Should redirect to login page"
        assert resp.url == reverse("login") + "?next=" + target, \
            "Should contain a next parameter for redirect"

    def test_authenticated_user(self):
        """The import product dialog requires the change_product permission."""
        target = reverse(self.URL_NAME)
        req = RequestFactory().get(target)
        req.user = mixer.blend("auth.User", is_superuser=False, is_staff=False)
        with pytest.raises(PermissionDenied):
            views.import_products(req)
        req = RequestFactory().get(target)
        account = mixer.blend("auth.User", is_superuser=False, is_staff=False)
        account.user_permissions.add(Permission.objects.get(codename="change_product"))
        req.user = account
        resp = views.import_products(req)
        assert resp.status_code == 200, "Should be callable"

    @pytest.mark.usefixtures("disable_import_price_list_task")
    def test_post(self):
        """An uploaded file schedules the import task and redirects to its progress page."""
        target = reverse(self.URL_NAME)
        account = mixer.blend("auth.User", username="test", is_superuser=False, is_staff=False)
        account.user_permissions.add(Permission.objects.get(codename="change_product"))
        account.save()
        # the file content is not relevant for this test
        req = RequestFactory().post(target, data={
            "excel_file": SimpleUploadedFile("myfile.xlsx", b"yxz")
        })
        req.user = account
        resp = views.import_products(req)
        assert resp.status_code == 302, "redirect to task in progress view"
        assert resp.url == reverse("task_in_progress", kwargs={"task_id": "mock_task_id"})

    @pytest.mark.usefixtures("disable_import_price_list_task")
    def test_post_as_superuser(self):
        """Superusers can also trigger the import task."""
        target = reverse(self.URL_NAME)
        account = mixer.blend("auth.User", username="test", is_superuser=True, is_staff=False)
        account.user_permissions.add(Permission.objects.get(codename="change_product"))
        account.save()
        # the file content is not relevant for this test
        req = RequestFactory().post(target, data={
            "excel_file": SimpleUploadedFile("myfile.xlsx", b"yxz")
        })
        req.user = account
        resp = views.import_products(req)
        assert resp.status_code == 302, "redirect to task in progress view"
        assert resp.url == reverse("task_in_progress", kwargs={"task_id": "mock_task_id"})
@pytest.mark.usefixtures("import_default_vendors")
class TestEditUserProfileView:
    """Tests for the edit user profile view."""
    URL_NAME = "productdb:edit-user_profile"

    def test_anonymous_default(self):
        """Anonymous users are redirected to the login page."""
        url = reverse(self.URL_NAME)
        request = RequestFactory().get(url)
        request.user = AnonymousUser()
        response = views.edit_user_profile(request)
        assert response.status_code == 302, "Should redirect to login page"
        assert response.url == reverse("login") + "?next=" + url, \
            "Should contain a next parameter for redirect"

    @pytest.mark.usefixtures("enable_login_only_mode")
    def test_anonymous_login_only_mode(self):
        """In login-only mode anonymous users are also redirected to the login page."""
        url = reverse(self.URL_NAME)
        request = RequestFactory().get(url)
        request.user = AnonymousUser()
        response = views.edit_user_profile(request)
        assert response.status_code == 302, "Should redirect to login page"
        assert response.url == reverse("login") + "?next=" + url, \
            "Should contain a next parameter for redirect"

    def test_authenticated_user(self):
        """Any authenticated user may open their own profile page (no special permission)."""
        url = reverse(self.URL_NAME)
        user = mixer.blend("auth.User", is_superuser=False, is_staff=False)
        request = RequestFactory().get(url)
        # BUGFIX: a second, unused user was blended here and immediately
        # overwritten; use the prepared user directly
        request.user = user
        response = views.edit_user_profile(request)
        assert response.status_code == 200, "Should be callable"

    def test_user_email_is_set_as_initial_value(self):
        # TODO(review): test body was never implemented — add assertions that
        # the form's initial email matches the requesting user's email
        pass

    @pytest.mark.usefixtures("disable_import_price_list_task")
    def test_post(self):
        """A valid POST stores the profile changes and redirects to the homepage."""
        # NOTE(review): the disable_import_price_list_task fixture looks like a
        # copy/paste leftover from the import tests — confirm whether it is needed
        url = reverse(self.URL_NAME)
        user = mixer.blend("auth.User", username="test", is_superuser=False, is_staff=False)
        request = RequestFactory().post(url, data={
            "email": "a@b.com",
            "preferred_vendor": 1
        })
        request.user = user
        msgs = patch_contrib_messages(request)
        response = views.edit_user_profile(request)
        # BUGFIX: the message previously said "redirect to task in progress
        # view", but this view redirects to the homepage (asserted below)
        assert response.status_code == 302, "should redirect to the homepage"
        assert msgs.added_new is True
        assert response.url == reverse("productdb:home")

    @pytest.mark.usefixtures("disable_import_price_list_task")
    def test_post_with_back_to_link(self):
        """A POST with a ?back_to= parameter redirects to the given target."""
        url = reverse(self.URL_NAME) + "?back_to=" + reverse("productdb:about")
        user = mixer.blend("auth.User", username="test", is_superuser=False, is_staff=False)
        request = RequestFactory().post(url, data={
            "email": "a@b.com",
            "preferred_vendor": 1
        })
        request.user = user
        msgs = patch_contrib_messages(request)
        response = views.edit_user_profile(request)
        # BUGFIX: corrected copy/paste assertion message (see test_post)
        assert response.status_code == 302, "should redirect to the back_to target"
        assert msgs.added_new is True
        assert response.url == reverse("productdb:about"), "Should return to the back_to reference"
@pytest.mark.usefixtures("import_default_vendors")
class TestListProductCheckView:
    """Tests for the product check list view."""

    URL_NAME = "productdb:list-product_checks"

    def test_anonymous_default(self):
        # anonymous access is allowed while login-only mode is off
        target = reverse(self.URL_NAME)
        req = RequestFactory().get(target)
        req.user = AnonymousUser()

        resp = views.list_product_checks(req)

        assert resp.status_code == 200, "Should be callable"

    @pytest.mark.usefixtures("enable_login_only_mode")
    @pytest.mark.usefixtures("import_default_vendors")
    def test_anonymous_login_only_mode(self):
        # with login-only mode enabled, anonymous users must sign in first
        target = reverse(self.URL_NAME)
        req = RequestFactory().get(target)
        req.user = AnonymousUser()

        resp = views.list_product_checks(req)

        assert resp.status_code == 302, "Should redirect to login page"
        assert resp.url == reverse("login") + "?next=" + target, \
            "Should contain a next parameter for redirect"

    @pytest.mark.usefixtures("import_default_vendors")
    def test_authenticated_user(self):
        # a plain authenticated user needs no extra permissions
        target = reverse(self.URL_NAME)
        req = RequestFactory().get(target)
        req.user = mixer.blend("auth.User", is_superuser=False, is_staff=False)

        resp = views.list_product_checks(req)

        assert resp.status_code == 200, "Should be callable"
@pytest.mark.usefixtures("import_default_vendors")
class TestDetailProductCheckView:
    """Tests for the product check detail view."""

    URL_NAME = "productdb:detail-product_check"

    @staticmethod
    def _new_product_check():
        # minimal ProductCheck fixture shared by the tests in this class
        return ProductCheck.objects.create(name="Test", input_product_ids="Test")

    def test_anonymous_default(self):
        # anonymous access is allowed while login-only mode is off
        view_kwargs = {"product_check_id": self._new_product_check().id}
        req = RequestFactory().get(reverse(self.URL_NAME, kwargs=view_kwargs))
        req.user = AnonymousUser()

        resp = views.detail_product_check(req, **view_kwargs)

        assert resp.status_code == 200, "Should be callable"

    @pytest.mark.usefixtures("enable_login_only_mode")
    @pytest.mark.usefixtures("import_default_vendors")
    def test_anonymous_login_only_mode(self):
        # with login-only mode enabled, anonymous users must sign in first
        view_kwargs = {"product_check_id": self._new_product_check().id}
        target = reverse(self.URL_NAME, kwargs=view_kwargs)
        req = RequestFactory().get(target)
        req.user = AnonymousUser()

        resp = views.detail_product_check(req, **view_kwargs)

        assert resp.status_code == 302, "Should redirect to login page"
        assert resp.url == reverse("login") + "?next=" + target, \
            "Should contain a next parameter for redirect"

    @pytest.mark.usefixtures("import_default_vendors")
    def test_authenticated_user(self):
        view_kwargs = {"product_check_id": self._new_product_check().id}
        req = RequestFactory().get(reverse(self.URL_NAME, kwargs=view_kwargs))
        req.user = mixer.blend("auth.User", is_superuser=False, is_staff=False)

        resp = views.detail_product_check(req, **view_kwargs)

        assert resp.status_code == 200, "Should be callable"

    def test_404(self):
        # an unknown product check id raises Http404
        view_kwargs = {"product_check_id": 9999}
        req = RequestFactory().get(reverse(self.URL_NAME, kwargs=view_kwargs))
        req.user = AnonymousUser()

        with pytest.raises(Http404):
            views.detail_product_check(req, **view_kwargs)

    @pytest.mark.usefixtures("import_default_vendors")
    def test_in_progress_redirect(self):
        check = self._new_product_check()
        check.task_id = "1234"  # if task ID is set, a redirect to the task in progress should occur
        check.save()
        assert check.in_progress is True

        view_kwargs = {"product_check_id": check.id}
        req = RequestFactory().get(reverse(self.URL_NAME, kwargs=view_kwargs))
        req.user = mixer.blend("auth.User", is_superuser=False, is_staff=False)

        resp = views.detail_product_check(req, **view_kwargs)

        assert resp.status_code == 302
        assert resp.url.startswith("/productdb/task/")
@pytest.mark.usefixtures("import_default_vendors")
@pytest.mark.usefixtures("set_celery_always_eager")
class TestCreateProductCheckView:
    """Tests for the create product check view."""

    URL_NAME = "productdb:create-product_check"

    def test_anonymous_default(self):
        """The form is visible without login while login-only mode is off."""
        url = reverse(self.URL_NAME)
        request = RequestFactory().get(url)
        request.user = AnonymousUser()

        response = views.create_product_check(request)

        assert response.status_code == 200, "Should be callable without login"

    @pytest.mark.usefixtures("enable_login_only_mode")
    def test_anonymous_login_only_mode(self):
        """With login-only mode enabled, anonymous users are sent to login."""
        url = reverse(self.URL_NAME)
        request = RequestFactory().get(url)
        request.user = AnonymousUser()

        response = views.create_product_check(request)

        assert response.status_code == 302, "Should redirect to login page"
        assert response.url == reverse("login") + "?next=" + url, \
            "Should contain a next parameter for redirect"

    def test_authenticated_user(self):
        """Viewing the form requires no special permission."""
        url = reverse(self.URL_NAME)
        user = mixer.blend("auth.User", is_superuser=False, is_staff=False)
        request = RequestFactory().get(url)
        request.user = user

        response = views.create_product_check(request)

        assert response.status_code == 200, "Should be callable without permissions"

    @pytest.mark.usefixtures("import_default_vendors")
    def test_post(self):
        """A valid POST creates a ProductCheck and redirects to the task view."""
        url = reverse(self.URL_NAME)
        perm = Permission.objects.get(codename="add_productcheck")
        # a Product must exist in the database for the check to run against
        # (fix: the return value was previously bound to an unused local `p`)
        mixer.blend("productdb.Product")
        user = mixer.blend("auth.User", is_superuser=False, is_staff=False)
        user.user_permissions.add(perm)
        user.save()

        data = {
            "name": "My Product check",
            "input_product_ids": "test"
        }
        # fix: dropped the meaningless follow=True kwarg - RequestFactory has no
        # redirect-following support (only the test Client does); the kwarg was
        # silently stored in the WSGI environ
        request = RequestFactory().post(url, data=data)
        request.user = user

        response = views.create_product_check(request)

        assert response.status_code == 302
        assert response.url.startswith("/productdb/task/")
        assert ProductCheck.objects.count() == 1, "One element should be created in the database"

        # test public product check
        data = {
            "name": "My Product check",
            "input_product_ids": "test",
            "public_product_check": "on"
        }
        request = RequestFactory().post(url, data=data)
        request.user = user

        response = views.create_product_check(request)

        assert response.status_code == 302
        assert response.url.startswith("/productdb/task/")
        assert ProductCheck.objects.count() == 2, "One element should be created in the database"
| 41.577409
| 117
| 0.671638
| 6,231
| 52,637
| 5.494624
| 0.045258
| 0.028011
| 0.034758
| 0.042206
| 0.924176
| 0.911675
| 0.898239
| 0.88159
| 0.865409
| 0.859947
| 0
| 0.009866
| 0.220092
| 52,637
| 1,265
| 118
| 41.610277
| 0.824126
| 0.028915
| 0
| 0.846075
| 0
| 0
| 0.186311
| 0.050298
| 0
| 0
| 0
| 0
| 0.14577
| 1
| 0.087666
| false
| 0.001019
| 0.068298
| 0
| 0.195719
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4db7dda9b9752ef9825452911532282a4f913e90
| 248
|
py
|
Python
|
guillotina/contrib/templates/interfaces.py
|
rboixaderg/guillotina
|
fcae65c2185222272f3b8fee4bc2754e81e0e983
|
[
"BSD-2-Clause"
] | 173
|
2017-03-10T18:26:12.000Z
|
2022-03-03T06:48:56.000Z
|
guillotina/contrib/templates/interfaces.py
|
rboixaderg/guillotina
|
fcae65c2185222272f3b8fee4bc2754e81e0e983
|
[
"BSD-2-Clause"
] | 921
|
2017-03-08T14:04:43.000Z
|
2022-03-30T10:28:56.000Z
|
guillotina/contrib/templates/interfaces.py
|
rboixaderg/guillotina
|
fcae65c2185222272f3b8fee4bc2754e81e0e983
|
[
"BSD-2-Clause"
] | 60
|
2017-03-16T19:59:44.000Z
|
2022-03-03T06:48:59.000Z
|
from guillotina import schema
from guillotina.interfaces import IAsyncUtility
from guillotina.interfaces import IItem
class IJinjaUtility(IAsyncUtility):
    """Marker interface for the async Jinja rendering utility.

    Declares no methods of its own; used only for utility registration and
    lookup. NOTE(review): concrete utility contract lives elsewhere - confirm.
    """
    pass
class IJinjaTemplate(IItem):
    """Content-type interface for an item that stores a Jinja template."""
    # raw Jinja template source as text
    template = schema.Text(title="Jinja Template")
| 19.076923
| 50
| 0.802419
| 27
| 248
| 7.37037
| 0.555556
| 0.211055
| 0.241206
| 0.301508
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137097
| 248
| 12
| 51
| 20.666667
| 0.929907
| 0
| 0
| 0
| 0
| 0
| 0.056452
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.142857
| 0.428571
| 0
| 0.857143
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
1286f773bcdd2b5df1260a6360ccc13b57be663f
| 96,459
|
py
|
Python
|
act_dictionary.py
|
vikhil0609/quickcompanyGrobid
|
7b9e214cd0cb01c16cf82c6620792b52f6935031
|
[
"Apache-2.0"
] | null | null | null |
act_dictionary.py
|
vikhil0609/quickcompanyGrobid
|
7b9e214cd0cb01c16cf82c6620792b52f6935031
|
[
"Apache-2.0"
] | null | null | null |
act_dictionary.py
|
vikhil0609/quickcompanyGrobid
|
7b9e214cd0cb01c16cf82c6620792b52f6935031
|
[
"Apache-2.0"
] | null | null | null |
act_dic={"The Constitution of India": 'the-constitution-of-india', 'THE PUNJAB LAND REVENUE ACT, 1967': 'the-punjab-land-revenue-act-1967', 'THE SICK INDUSTRIAL COMPANIES (SPECIAL PROVISIONS) ACT, 1985': 'the-sick-industrial-companies-special-provisions-act-1985', 'The Trade and Merchandise Marks Act, 1958': 'the-trade-and-merchandise-marks-act', 'The Prevention of Food Adulteration Act, 1954': 'the-prevention-of-food-adulteration-act-1954', 'The Police Act, 1861': 'the-police-act-1861', 'The General Insurance Business (Nationalisation) Act, 1972': 'the-general-insurance-business-nationalisation-act-1972', 'The Transfer of Property Act, 1882': 'the-transfer-of-property-act-1882', 'The Aluminium Corporation of India Limited (Acquisition and Transfer of Aluminium Undertaking) Act, 1984': 'the-aluminium-corporation-of-india-limited-acquisition-and-transfer-of-aluminium-undertaking-act-1984', 'The Cattle-Trespass Act, 1871': 'the-cattle-trespass-act-1871', 'The SAARC Convention (Suppression of Terrorism) Act, 1993': 'the-saarc-convention-suppression-of-terrorism-act-1993', 'The Central Sales Tax (Amendment) Act, 1972': 'the-central-sales-tax-amendment-act-1972', 'The National Co-operative Development Corporation Act, 1962': 'the-national-co-operative-development-corporation-act-1962', 'The Dock Workers (Safety, Health and Welfare) Act, 1986': 'the-dock-workers-safety-health-and-welfare-act-1986', 'The Andhra Pradesh and Mysore (Transfer of Territory) Act, 1968': 'the-andhra-pradesh-and-mysore-transfer-of-territory-act-1968', 'The Indian Telegraph Act, 1885': 'the-indian-telegraph-act-1885', 'The Delhi Apartment Ownership Act, 1986': 'the-delhi-apartment-ownership-act-1986', 'The Supreme Court Judges (Salaries and Conditions of Service) Act, 1958': 'the-supreme-court-judges-salaries-and-conditions-of-service-act-1958', 'The Prevention of Corruption Act, 1988': 'the-prevention-of-corruption-act-1988', 'The Beedi Workers Welfare Fund\xa0\xa0Act, 1976': 
'the-beedi-workers-welfare-fund-act-1976', 'The Water (Prevention and Control of Pollution) Act, 1974': 'the-water-prevention-and-control-of-pollution-act-1974', 'The Emblems and Names (Prevention of Improper Use) Act, 1950': 'the-emblems-and-names-prevention-of-improper-use-act-1950', 'The Manipur (Hill Areas) District Councils Act, 1971': 'the-manipur-hill-areas-district-councils-act-1971', 'The Himachal Pradesh Legislative Assembly (Constitution and Proceedings) Validation Act, 1958': 'the-himachal-pradesh-legislative-assembly-constitution-and-proceedings-validation-act-1958', 'The Jamia Millia Islamia Act, 1988': 'the-jamia-millia-islamia-act-1988', 'The Pensions Act, 1871': 'the-pensions-act-1871', 'The Medical Termination of Pregnancy Act, 1971': 'the-medical-termination-of-pregnancy-act-1971', 'The Madras City Civil Court Act, 1892': 'the-madras-city-civil-court-act-1892', 'The Union Territory Goods and Services Tax Act, 2017': 'the-union-territory-goods-and-services-tax-act-2017', 'The High Court at Bombay (Extension of Jurisdiction to Goa, Daman and Diu) Act, 1981': 'the-high-court-at-bombay-extension-of-jurisdiction-to-goa-daman-and-diu-act-1981', "The Delhi (Urban Areas)\xa0\xa0Tenant's Relief Act, 1961": 'the-delhi-urban-areas-tenant-s-relief-act-1961', 'The Indian Treasure-trove Act, 1878': 'the-indian-treasure-trove-act-1878', 'The North-Eastern Hill University Act, 1973': 'the-north-eastern-hill-university-act-1973', 'The Travancore-Cochin Vehicles Taxation (Amendment and Validation) Act, 1959': 'the-travancore-cochin-vehicles-taxation-amendment-and-validation-act-1959', 'The Special Protection Group Act, 1988': 'the-special-protection-group-act-1988', 'The Coal Mines Provident Fund and Miscellaneous Provisions\xa0Act, 1948': 'the-coal-mines-provident-fund-and-miscellaneous-provisions-act-1948', 'The Haryana Motor Vehicles Taxation Act, 2016 (Act No.24 of 2016)': 'the-haryana-motor-vehicles-taxation-act-2016-act-no-24-of-2016', 'The Railways Act, 
1989': 'the-railways-act-1989', 'The Representation of the People Act, 1950': 'the-representation-of-the-people-act-1950', 'The Securities Contracts (Regulation) Act, 1956': 'the-securities-contracts-regulation-act-1956', 'The Post-Graduate Institute of Medical Education and Research, Chandigarh, Act 1966': 'the-post-graduate-institute-of-medical-education-and-research-chandigarh-act-1966', 'The Arms Act, 1959': 'the-arms-act-1959', 'The Usury Laws Repeal Act, 1855': 'the-usury-laws-repeal-act-1855', 'The Transplantation of Human Organs and Tissues Act, 1994': 'the-transplantation-of-human-organs-and-tissues-act-1994', 'The Illegal Migrants (Determination by Tribunals) Act, 1983': 'the-illegal-migrants-determination-by-tribunals-act-1983', 'The Central Vigilance Commission Act, 2003': 'the-central-vigilance-commission-act-2003', 'The Life Insurance Corporation (Amendment) Act, 1957': 'the-life-insurance-corporation-amendment-act-1957', 'The Indian Institute of Management Act, 2017': 'the-indian-institute-of-management-act-2017', 'The Patents Act, 1970': 'the-patents-act-1970', 'The Rehabilitation Council of India Act, 1992': 'the-rehabilitation-council-of-india-act-1992', 'The Hindu Succession Act, 1956': 'the-hindu-succession-act-1956', 'The Slum Areas (Improvement and Clearance) Act, 1956': 'the-slum-areas-improvement-and-clearance-act-1956', 'The Securitisation and Reconstruction of Financial Assets and Enforcement of Security Interest Act, 2002': 'the-securitisation-and-reconstruction-of-financial-assets-and-enforcement-of-security-interest-act-2002', 'The Pre-conception and Pre-natal Diagnostic Techniques (Prohibition of Sex Selection) Act, 1994': 'the-pre-conception-and-pre-natal-diagnostic-techniques-prohibition-of-sex-selection-act-1994', 'The Khaddar (Protection of Name) Act, 1950': 'the-khaddar-protection-of-name-act-1950', 'The Chemical Weapons Convention Act, 2000': 'the-chemical-weapons-convention-act-2000', 'The Guardians and Wards Act, 1890': 
'the-guardians-and-wards-act-1890', 'The Mangrol and Manavadar (Administration of Property) Act, 1949': 'the-mangrol-and-manavadar-administration-of-property-act-1949', 'The Narcotic Drugs and Psychotropic Substances Act, 1985': 'the-narcotic-drugs-and-psychotropic-substances-act-1985', 'The Press and Registration of Books Act, 1867': 'the-press-and-registration-of-books-act-1867', 'The National Bank for Agriculture and Rural Development Act, 1981': 'the-national-bank-for-agriculture-and-rural-development-act-1981', 'The Warehousing Corporations\xa0\xa0Act, 1962': 'the-warehousing-corporations-act-1962', 'The Oilfields (Regulation and Development) Act, 1948': 'the-oilfields-regulation-and-development-act-1948', 'The Tezpur University Act, 1993': 'the-tezpur-university-act-1993', 'The Carriers Act, 1865': 'the-carriers-act-1865', 'The Marine Products Export Development Authority Act, 1972': 'the-marine-products-export-development-authority-act-1972', 'The Working Journalists (Fixation of Rates of Wages) Act, 1958': 'the-working-journalists-fixation-of-rates-of-wages-act-1958', 'The Diplomatic Relations (Vienna Convention) Act, 1972': 'the-diplomatic-relations-vienna-convention-act-1972', 'The Dowry Prohibition Act, 1961': 'the-dowry-prohibition-act-1961', 'The Court-fees (Delhi Amendment) Act, 1967': 'the-court-fees-delhi-amendment-act-1967', 'The Bengal, Agra and Assam Civil Courts Act, 1887': 'the-bengal-agra-and-assam-civil-courts-act-1887', 'The National Housing Bank Act, 1987': 'the-national-housing-bank-act-1987', 'The Haryana Gauvansh Sanrakshan and Gausamvardhan Act, 2015 Haryana Act No. 
20 of 2015': 'the-haryana-gauvansh-sanrakshan-and-gausamvardhan-act-2015-haryana-act-no-20-of-2015', 'The Manipur (Courts) Act, 1955': 'the-manipur-courts-act-1955', 'The Delhi and Ajmer Rent Control (Nasirabad Cantonment Repeal) Act, 1968': 'the-delhi-and-ajmer-rent-control-nasirabad-cantonment-repeal-act-1968', 'The Terminal Tax on Railway Passengers\xa0Act, 1956': 'the-terminal-tax-on-railway-passengers-act-1956', 'The Places of Worship (Special Provisions) Act, 1991': 'the-places-of-worship-special-provisions-act-1991', 'The Companies (Donations to National Funds) Act, 1951': 'the-companies-donations-to-national-funds-act-1951', 'The Manipur Panchayati Raj Act, 1994': 'the-manipur-panchayati-raj-act-1994', 'The Union Duties of Excise (Distribution) Act, 1979': 'the-union-duties-of-excise-distribution-act-1979', 'The Apprentices Act, 1961': 'the-apprentices-act-1961', 'The Metro Railways (Construction of Works) Act, 1978': 'the-metro-railways-construction-of-works-act-1978', 'The Anand Marriage Act, 1909': 'the-anand-marriage-act-1909', 'The Indian Railway Board Act, 1905': 'the-indian-railway-board-act-1905', 'The Industrial Finance Corporation (Transfer of Undertaking and Repeal) Act, 1993': 'the-industrial-finance-corporation-transfer-of-undertaking-and-repeal-act-1993', 'The State Bank of India Act, 1955': 'the-state-bank-of-india-act-1955', 'The Maritime Zones of India (Regulation of Fishing by Foreign Vessels) Act, 1981': 'the-maritime-zones-of-india-regulation-of-fishing-by-foreign-vessels-act-1981', 'The Sales Promotion Employees (Conditions of Service) Act, 1976': 'the-sales-promotion-employees-conditions-of-service-act-1976', 'The Emergency Risks (Undertakings) Insurance Act, 1971': 'the-emergency-risks-undertakings-insurance-act-1971', 'The Anti-Apartheid (United Nations Convention) Act, 1981': 'the-anti-apartheid-united-nations-convention-act-1981', 'The Delhi Hotels (Control of Accommodation) Act, 1949': 
'the-delhi-hotels-control-of-accommodation-act-1949', 'The Jammu and Kashmir Representation of the People (Supplementary) Act, 1968': 'the-jammu-and-kashmir-representation-of-the-people-supplementary-act-1968', 'The Indian Post Office Act, 1898': 'the-indian-post-office-act-1898', 'The Contingency Fund of India Act, 1950': 'the-contingency-fund-of-india-act-1950', 'The Haryana and Punjab Agricultural Universities Act, 1970': 'the-haryana-and-punjab-agricultural-universities-act-1970', 'The Public Employment (Requirement as to Residence) Act, 1957': 'the-public-employment-requirement-as-to-residence-act-1957', 'The Hindu Disposition of Property Act, 1916': 'the-hindu-disposition-of-property-act-1916', 'The Representation of the People Act, 1951': 'the-representation-of-the-people-act-1951', 'The Lushai Hills District (Change of Name) Act, 1954': 'the-lushai-hills-district-change-of-name-act-1954', 'The Mizoram University Act, 2000': 'the-mizoram-university-act-2000', 'The Technology Development Board Act, 1995': 'the-technology-development-board-act-1995', 'The Control of National Highways (Land and Traffic) Act, 2002': 'the-control-of-national-highways-land-and-traffic-act-2002', 'The Provisions of the Panchayats (Extension to the Scheduled Areas) Act, 1996': 'the-provisions-of-the-panchayats-extension-to-the-scheduled-areas-act-1996', 'The Public Provident Fund Act, 1968': 'the-public-provident-fund-act-1968', 'The Jute Companies (Nationalisation) Act, 1980': 'the-jute-companies-nationalisation-act-1980', 'The Coffee Act, 1942': 'the-coffee-act-1942', 'The Union Territories (Separation of Judicial and Executive Functions) Act, 1969': 'the-union-territories-separation-of-judicial-and-executive-functions-act-1969', 'The Usurious Loans Act, 1918': 'the-usurious-loans-act-1918', 'The Personal Injuries (Compensation Insurance) Act, 1963': 'the-personal-injuries-compensation-insurance-act-1963', 'The Human Immunodeficiency Virus and Acquired Immune Deficiency Syndrome 
(Prevention and Control) Act, 2017': 'the-human-immunodeficiency-virus-and-acquired-immune-deficiency-syndrome-prevention-and-control-act-2017', 'The Former Secretary of State Service Officers (Conditions of Service) Act, 1972': 'the-former-secretary-of-state-service-officers-conditions-of-service-act-1972', 'The Supreme Court (Number of Judges)\xa0Act, 1956': 'the-supreme-court-number-of-judges-act-1956', 'The Waqf Act, 1995': 'the-waqf-act-1995', 'The Haryana Gauvansh Sanrakkshan and Gausamvardhan Act, 2015': 'the-haryana-gauvansh-sanrakkshan-and-gausamvardhan-act-2015', 'The Metal Corporation (Nationalisation and Miscellaneous Provisions) Act, 1976': 'the-metal-corporation-nationalisation-and-miscellaneous-provisions-act-1976', 'The Footwear Design and Development Institute Act, 2017': 'the-footwear-design-and-development-institute-act-2017', 'The Diplomatic and Consular Officers (Oaths and Fees) Act, 1948': 'the-diplomatic-and-consular-officers-oaths-and-fees-act-1948', 'The Advocates Act, 1961': 'the-advocates-act-1961', 'The Institutes of Technology Act, 1961': 'the-institutes-of-technology-act-1961', 'The Charitable Endowments Act, 1890': 'the-charitable-endowments-act-1890', 'The Gurugram University Act, 2017 (Act No.17 of 2017)': 'the-gurugram-university-act-2017-act-no-17-of-2017', 'The Delhi Primary Education Act, 1960': 'the-delhi-primary-education-act-1960', 'The Contract Labour (Regulation and Abolition) Act, 1970': 'the-contract-labour-regulation-and-abolition-act-1970', 'The Collection of Statistics Act, 2008': 'the-collection-of-statistics-act-2008', 'The State of Mizoram Act, 1986': 'the-state-of-mizoram-act-1986', 'The Prevention of Cruelty to Animals Act, 1960': 'the-prevention-of-cruelty-to-animals-act-1960', 'The Additional Duties of Excise (Textiles and Textile Articles) Act, 1978': 'the-additional-duties-of-excise-textiles-and-textile-articles-act-1978', 'The Pondicherry (Administration)\xa0\xa0Act, 1962': 
'the-pondicherry-administration-act-1962', 'The Prevention of Insults to National Honour Act, 1971': 'the-prevention-of-insults-to-national-honour-act-1971', 'The Company Secretaries Act, 1980': 'the-company-secretaries-act-1980', 'The Married Women s Property (Extension) Act, 1959': 'the-married-women-s-property-extension-act-1959', 'The Chandigarh Disturbed Areas Act, 1983': 'the-chandigarh-disturbed-areas-act-1983', 'The Indecent Representation of Women (Prohibition) Act, 1986': 'the-indecent-representation-of-women-prohibition-act-1986', 'The Lalit Kala Akadami (Taking Over of Management) Act, 1997': 'the-lalit-kala-akadami-taking-over-of-management-act-1997', 'The Delhi Municipal Corporation Act, 1957': 'the-delhi-municipal-corporation-act-1957', 'The Beedi Workers Welfare Cess\xa0Act, 1976': 'the-beedi-workers-welfare-cess-act-1976', 'The Delhi Sales Tax Act, 1975': 'the-delhi-sales-tax-act-1975', 'The PUNJAB AYURVEDIC AND UNANI': 'the-punjab-ayurvedic-and-unani', 'The Murshidabad Act, 1891': 'the-murshidabad-act-1891', 'The Auroville Foundation Act, 1988': 'the-auroville-foundation-act-1988', 'The Airports Authority of India Act, 1994': 'the-airports-authority-of-india-act-1994', 'The State of Himachal Pradesh Act, 1970': 'the-state-of-himachal-pradesh-act-1970', 'The Prevention of Terrorism (Repeal) Act, 2004': 'the-prevention-of-terrorism-repeal-act-2004', 'The Oaths Act, 1969': 'the-oaths-act-1969', 'The State-Associated Banks (Miscellaneous Provisions) Act, 1962': 'the-state-associated-banks-miscellaneous-provisions-act-1962', 'The Governors (Emoluments, Allowances and Privileges) Act, 1982': 'the-governors-emoluments-allowances-and-privileges-act-1982', 'The Sick Industrial Companies (Special Provisions) Repeal Act, 2003': 'the-sick-industrial-companies-special-provisions-repeal-act-2003', 'The Terrorist Affected Areas (Special Courts) Act, 1984': 'the-terrorist-affected-areas-special-courts-act-1984', 'The Delhi Development Act, 1957': 
'the-delhi-development-act-1957', 'The Dakshina Bharat Hindi Prachar Sabha Act, 1964': 'the-dakshina-bharat-hindi-prachar-sabha-act-1964', 'The Coast Guard Act, 1978': 'the-coast-guard-act-1978', 'The Seaward Artillery Practice Act, 1949': 'the-seaward-artillery-practice-act-1949', 'The Tamil Nadu Agricultural Service Co-operative Societies (Appointment of Special Officers) Amendment Act, 1988': 'the-tamil-nadu-agricultural-service-co-operative-societies-appointment-of-special-officers-amendment-act-1988', 'The Registration Act, 1908': 'the-registration-act-1908', 'The Delhi Co-operative Societies Act, 1972': 'the-delhi-co-operative-societies-act-1972', 'The Intelligence Organisations (Restriction of Rights) Act, 1985': 'the-intelligence-organisations-restriction-of-rights-act-1985', 'The Railway Claims Tribunal Act, 1987': 'the-railway-claims-tribunal-act-1987', 'The University of Allahabad Act, 2005': 'the-university-of-allahabad-act-2005', 'The Damodar Valley Corporation Act, 1948': 'the-damodar-valley-corporation-act-1948', 'The Madhya Pradesh Reorganisation Act, 2000': 'the-madhya-pradesh-reorganisation-act-2000', 'The Export-Import Bank of India Act, 1981': 'the-export-import-bank-of-india-act-1981', 'The National Institutes of Technology, Science Education and Research Act, 2007': 'the-national-institutes-of-technology-science-education-and-research-act-2007', 'The National Company Limited (Acquisition and Transfer of Undertakings) Act, 1980': 'the-national-company-limited-acquisition-and-transfer-of-undertakings-act-1980', 'The Reciprocity Act, 1943': 'the-reciprocity-act-1943', 'The Government of National Capital Territory of Delhi Act, 1991': 'the-government-of-national-capital-territory-of-delhi-act-1991', "The Seamen's Provident Fund Act, 1966": 'the-seamen-s-provident-fund-act-1966', 'The Limitation Act, 1963': 'the-limitation-act-1963', 'The Cinematograph (Amendment) Act, 1973': 'the-cinematograph-amendment-act-1973', 'The Presidency-towns Insolvency 
Act, 1909': 'the-presidency-towns-insolvency-act-1909', 'The Live-stock Importation Act, 1898': 'the-live-stock-importation-act-1898', 'The National Waterway (Talcher-Dhamra Stretch of Rivers, Geonkhali-Charbatia Stretch of East Coast Canal, Charbatia-Dhamra Stretch of Matai River and Mahanadi Delta Rivers) Act, 2008': 'the-national-waterway-talcher-dhamra-stretch-of-rivers-geonkhali-charbatia-stretch-of-east-coast-canal-charbatia-d', 'The Salary, Allowances and Pension of Members of Parliament Act, 1954': 'the-salary-allowances-and-pension-of-members-of-parliament-act-1954', 'The Payment of Gratuity Act, 1972': 'the-payment-of-gratuity-act-1972', 'The Goa, Daman and Diu (Extension of the Code of Civil Procedure and the Arbitration Act) Act, 1965': 'the-goa-daman-and-diu-extension-of-the-code-of-civil-procedure-and-the-arbitration-act-act-1965', 'The Goa, Daman and Diu (Administration) Act, 1962': 'the-goa-daman-and-diu-administration-act-1962', 'The Public Sector Iron and Steel Companies (Restructuring) and Miscellaneous Provisions Act, 1978': 'the-public-sector-iron-and-steel-companies-restructuring-and-miscellaneous-provisions-act-1978', 'The Indian Succession Act, 1925': 'the-indian-succession-act-1925', 'The Navy Act, 1957': 'the-navy-act-1957', 'The Coastal Aquaculture Authority Act, 2005': 'the-coastal-aquaculture-authority-act-2005', 'The Trade Marks Act, 1999': 'the-trade-marks-act-1999', 'The Drugs and Cosmetics Act, 1940': 'the-drugs-and-cosmetics-act-1940', 'The Emigration Act, 1983': 'the-emigration-act-1983', 'The Integrated Goods and Services Tax Act, 2017': 'the-integrated-goods-and-services-tax-act-2017', 'The Government Securities Act, 2006': 'the-government-securities-act-2006', 'The Nagaland University Act, 1989': 'the-nagaland-university-act-1989', 'The Calcutta Metro Railway (Operation and Maintenance) Temporary Provisions Act, 1985': 'the-calcutta-metro-railway-operation-and-maintenance-temporary-provisions-act-1985', 'The Prohibition of 
Child Marriage Act, 2006': 'the-prohibition-of-child-marriage-act-2006', 'The Assam University Act, 1989': 'the-assam-university-act-1989', 'The Shipping Development Fund Committee (Abolition) Act, 1986': 'the-shipping-development-fund-committee-abolition-act-1986', 'The Indian Penal Code, 1860': 'the-indian-penal-code-1860', 'The Mahatma Gandhi National Rural Employment Guarantee Act, 2005': 'the-mahatma-gandhi-national-rural-employment-guarantee-act-2005', 'The India Tolls Act, 1851': 'the-india-tolls-act-1851', 'The Weapons of Mass Destruction and their Delivery Systems (Prohibition of Unlawful Activities) Act, 2005': 'the-weapons-of-mass-destruction-and-their-delivery-systems-prohibition-of-unlawful-activities-act-2005', 'The Chartered Accountants Act, 1949': 'the-chartered-accountants-act-1949', 'The Foreign Exchange Management Act, 1999': 'the-foreign-exchange-management-act-1999', 'The High Denomination Bank Notes (Demonetisation) Act, 1978': 'the-high-denomination-bank-notes-demonetisation-act-1978', 'The Jallianwala Bagh National Memorial Act, 1951': 'the-jallianwala-bagh-national-memorial-act-1951', 'The Indian Ports Act, 1908': 'the-indian-ports-act-1908', 'The Regional Rural Banks Act, 1976': 'the-regional-rural-banks-act-1976', 'The North-Eastern Areas (Reorganisation) Act, 1971': 'the-north-eastern-areas-reorganisation-act-1971', 'The Army Act, 1950': 'the-army-act-1950', 'The Legislative Assembly of Nagaland (Change in Representation) Act, 1968': 'the-legislative-assembly-of-nagaland-change-in-representation-act-1968', 'The Limestone and Dolomite Mines Labour Welfare Fund (Amendment) Act, 1982': 'the-limestone-and-dolomite-mines-labour-welfare-fund-amendment-act-1982', 'The National Council for Teacher Education Act, 1993': 'the-national-council-for-teacher-education-act-1993', 'The Co-operative Societies Act, 1912': 'the-co-operative-societies-act-1912', 'The Industries (Development and Regulation) Act, 1951': 
'the-industries-development-and-regulation-act-1951', 'The Multi-State Co-operative Societies Act, 2002': 'the-multi-state-co-operative-societies-act-2002', 'The Bihar Reorganisation Act, 2000': 'the-bihar-reorganisation-act-2000', 'The National Judicial Appointments Commission Act, 2014': 'the-national-judicial-appointments-commission-act-2014', 'The Architects Act, 1972': 'the-architects-act-1972', 'The Gresham and Craven of India (Private) Limited (Acquisition and Transfer of Undertakings) Act, 1977': 'the-gresham-and-craven-of-india-private-limited-acquisition-and-transfer-of-undertakings-act-1977', 'The Prisons Act, 1894': 'the-prisons-act-1894', 'The Armed Forces (Punjab and Chandigarh) Special Powers Act, 1983': 'the-armed-forces-punjab-and-chandigarh-special-powers-act-1983', 'The Banaras Hindu University Act, 1915': 'the-banaras-hindu-university-act-1915', 'The Right to Fair Compensation and Transparency in Land Acquisition, Rehabilitation and Resettlement Act, 2013': 'the-right-to-fair-compensation-and-transparency-in-land-acquisition-rehabilitation-and-resettlement-act-2013', 'The High Court at Patna (Establishment of a Permanent Bench at Ranchi)\xa0Act, 1976': 'the-high-court-at-patna-establishment-of-a-permanent-bench-at-ranchi-act-1976', 'The Personal Injuries (Emergency Provisions) Act, 1962': 'the-personal-injuries-emergency-provisions-act-1962', 'The Biological Diversity Act, 2002': 'the-biological-diversity-act-2002', 'The Delimitation Act, 2002': 'the-delimitation-act-2002', 'The National Highways Authority of India Act, 1988': 'the-national-highways-authority-of-india-act-1988', 'The Delivery of Books and Newspapers (Public Libraries) Act, 1954': 'the-delivery-of-books-and-newspapers-public-libraries-act-1954', 'The Gold Bonds (Immunities and Exemptions) Act, 1993': 'the-gold-bonds-immunities-and-exemptions-act-1993', 'The North-Eastern Council Act, 1971': 'the-north-eastern-council-act-1971', 'The Delhi High Court Act, 1966': 
'the-delhi-high-court-act-1966', 'The Inland Waterways Authority of India Act, 1985': 'the-inland-waterways-authority-of-india-act-1985', 'The Prevention of Damage to Public Property Act, 1984': 'the-prevention-of-damage-to-public-property-act-1984', 'The Indian Stamp Act, 1899': 'the-indian-stamp-act-1899', 'The Labour Laws (Exemption from Furnishing Returns and Maintaining Registers by certain Establishments) Act, 1988': 'the-labour-laws-exemption-from-furnishing-returns-and-maintaining-registers-by-certain-establishments-act-1988', 'The Tripura Land Revenue and Land Reforms Act, 1960': 'the-tripura-land-revenue-and-land-reforms-act-1960', 'The Iron Ore Mines, Manganese Ore Mines and Chrome Ore Mines Labour Welfare Fund Act, 1976': 'the-iron-ore-mines-manganese-ore-mines-and-chrome-ore-mines-labour-welfare-fund-act-1976', 'The Naga Hills-Tuensang Area Act, 1957': 'the-naga-hills-tuensang-area-act-1957', 'The National Rural Employment Guarantee (Extension to Jammu and Kashmir) Act, 2007': 'the-national-rural-employment-guarantee-extension-to-jammu-and-kashmir-act-2007', 'The High Courts (Seals) Act, 1950': 'the-high-courts-seals-act-1950', 'The Departmentalisation of Union Accounts (Transfer of Personnel)\xa0Act, 1976': 'the-departmentalisation-of-union-accounts-transfer-of-personnel-act-1976', 'The Telegraph Wires (Unlawful Possession) Act, 1950': 'the-telegraph-wires-unlawful-possession-act-1950', 'The Brahmaputra Board Act, 1980': 'the-brahmaputra-board-act-1980', 'The Dourine Act, 1910': 'the-dourine-act-1910', 'The Armed Forces (Special Powers) Act, 1958': 'the-armed-forces-special-powers-act-1958', 'The Delhi Urban Art Commission Act, 1973': 'the-delhi-urban-art-commission-act-1973', 'The Homoeopathy Central Council \xa0Act, 1973': 'the-homoeopathy-central-council-act-1973', 'The Public Debt Act, 1944': 'the-public-debt-act-1944', 'The Seeds Act, 1966': 'the-seeds-act-1966', 'The Rajasthan and Madhya Pradesh (Transfer of Territories) Act, 1959': 
'the-rajasthan-and-madhya-pradesh-transfer-of-territories-act-1959', 'The Spices Board Act, 1986': 'the-spices-board-act-1986', 'The Madras Civil Courts Act, 1873': 'the-madras-civil-courts-act-1873', 'The Dissolution of Muslim Marriages Act, 1939': 'the-dissolution-of-muslim-marriages-act-1939', 'The National Dairy Development Board Act, 1987': 'the-national-dairy-development-board-act-1987', 'The Textiles Committee Act, 1963': 'the-textiles-committee-act-1963', 'The Export (Quality Control and Inspection) Act, 1963': 'the-export-quality-control-and-inspection-act-1963', 'The Expenditure-tax Act, 1987': 'the-expenditure-tax-act-1987', 'The Betwa River Board (Amendment) Act, 1993': 'the-betwa-river-board-amendment-act-1993', 'The Compensatory Afforestation Fund Act, 2016.': 'the-compensatory-afforestation-fund-act-2016', 'The Public Records Act, 1993': 'the-public-records-act-1993', 'The Departmental Inquiries (Enforcement of Attendance of Witnesses and Production of Documents) Act, 1972': 'the-departmental-inquiries-enforcement-of-attendance-of-witnesses-and-production-of-documents-act-1972', 'The Bombay Reorganisation Act, 1960': 'the-bombay-reorganisation-act-1960', 'The Northern India Canal and Drainage Act, 1873': 'the-northern-india-canal-and-drainage-act-1873', 'The Parliament (Prevention of Disqualification) Act, 1959': 'the-parliament-prevention-of-disqualification-act-1959', 'The Pondicherry (Extension of Laws) Act, 1968': 'the-pondicherry-extension-of-laws-act-1968', 'The Central Laws (Extension to Jammu and Kashmir) Act, 1968': 'the-central-laws-extension-to-jammu-and-kashmir-act-1968', 'The Indian Wireless Telegraphy Act, 1933': 'the-indian-wireless-telegraphy-act-1933', 'The English and Foreign Languages University Act, 2006': 'the-english-and-foreign-languages-university-act-2006', 'The Scheduled Areas (Assimilation of Laws) Act, 1951': 'the-scheduled-areas-assimilation-of-laws-act-1951', 'The Registration of Births and Deaths Act, 1969': 
'the-registration-of-births-and-deaths-act-1969', 'The East Punjab Urban Rent Restriction Act (Extension to Chandigarh) Act, 1974': 'the-east-punjab-urban-rent-restriction-act-extension-to-chandigarh-act-1974', 'The Government Savings Banks Act, 1873': 'the-government-savings-banks-act-1873', 'The Foreign Trade (Development and Regulation) Act, 1992': 'the-foreign-trade-development-and-regulation-act-1992', 'The Drugs and Magic Remedies (Objectionable Advertisement) Act, 1954': 'the-drugs-and-magic-remedies-objectionable-advertisement-act-1954', 'The Poisons Act, 1919': 'the-poisons-act-1919', 'The Supreme Court (Enlargement of Criminal Appellate Jurisdiction) Act, 1970': 'the-supreme-court-enlargement-of-criminal-appellate-jurisdiction-act-1970', 'The Right of Information Act, 2005': 'the-right-of-information-act-2005', 'The Warehousing Corporations (Supplementary) Act, 1965': 'the-warehousing-corporations-supplementary-act-1965', 'The Public Accountants Defaults Act, 1850': 'the-public-accountants-defaults-act-1850', 'The Cinematograph Act, 1952': 'the-cinematograph-act-1952', 'The Press Council Act, 1978': 'the-press-council-act-1978', 'The Majority Act, 1875': 'the-majority-act-1875', 'The Indian Maritime University Act, 2008': 'the-indian-maritime-university-act-2008', 'The The Leaders and Chief Whips of Recognised Parties and Groups in Parliament (Facilities) Amendment Act, 2000': 'the-the-leaders-and-chief-whips-of-recognised-parties-and-groups-in-parliament-facilities-amendment-act-2000', 'The Northern Indian Ferries Act, 1878': 'the-northern-indian-ferries-act-1878', 'The Bombay Revenue Jurisdiction Act, 1875': 'the-bombay-revenue-jurisdiction-act-1875', 'The Inter-State Corporations Act, 1957': 'the-inter-state-corporations-act-1957', 'The Britannia Engineering Company Limited (Mokameh Unit) and the Arthur Butler and Company (Muzaffarpore) Limited (Acquisition and Transfer of Undertakings) Act, 1978': 
'the-britannia-engineering-company-limited-mokameh-unit-and-the-arthur-butler-and-company-muzaffarpore-limited-ac', 'The Merchant Shipping Act, 1958': 'the-merchant-shipping-act-1958', 'The International Development Association (Status, Immunities and Privileges) Act, 1960': 'the-international-development-association-status-immunities-and-privileges-act-1960', 'The Cost and Works Accountants Act, 1959': 'the-cost-and-works-accountants-act-1959', 'The Bengal Finance (Sales Tax) (Delhi Validation of Appointments and Proceedings) Act, 1971': 'the-haryana-housing-board-act-1971', 'The Agricultural Produce (Grading and Marking) Act, 1937': 'the-agricultural-produce-grading-and-marking-act-1937', 'The Gold (Control) Amendment Act, 1971': 'the-gold-control-amendment-act-1971', 'The Public Gambling Act, 1867': 'the-public-gambling-act-1867', 'The African Development Fund Act, 1982': 'the-african-development-fund-act-1982', 'The Sale of Goods Act, 1930': 'the-sale-of-goods-act-1930', 'The Agricultural and Processed Food Products Export Development Authority Act, 1985': 'the-agricultural-and-processed-food-products-export-development-authority-act-1985', 'The Indian Bills of Landing Act 1856': 'the-indian-bills-of-landing-act-1856', 'The Geographical Indications of Goods (Registration and Protection) Act, 1999': 'the-geographical-indications-of-goods-registration-and-protection-act-1999', 'The Citizenship Act, 1955': 'the-citizenship-act-1955', 'The Unlawful Activities (Prevention) Act, 1967': 'the-unlawful-activities-prevention-act-1967', 'The Commercial Documents Evidence Act, 1939': 'the-commercial-documents-evidence-act-1939', 'The The Parliamentary Proceedings (Protection of Publication) Act, 1977': 'the-the-parliamentary-proceedings-protection-of-publication-act-1977', 'The Hindi Sahitya Sammelan Act, 1962': 'the-hindi-sahitya-sammelan-act-1962', 'The Smugglers and Foreign Exchange Manipulators (Forfeiture of Property) Act, 1976': 
'the-smugglers-and-foreign-exchange-manipulators-forfeiture-of-property-act-1976', 'The National Institute of Mental Health and Neuro-Sciences, Bangalore Act, 2012': 'the-national-institute-of-mental-health-and-neuro-sciences-bangalore-act-2012', 'The Customs Tariff (Amendment) Act, 1982': 'the-customs-tariff-amendment-act-1982', 'The Powers-of Attorney Act, 1882': 'the-powers-of-attorney-act-1882', 'The Rampur Raza Library\xa0\xa0Act, 1975': 'the-rampur-raza-library-act-1975', 'The Manipur University Act, 2005': 'the-manipur-university-act-2005', 'The Salar Jung Museum Act, 1961': 'the-salar-jung-museum-act-1961', 'The Air (Prevention and Control of Pollution) Act, 1981': 'the-air-prevention-and-control-of-pollution-act-1981', 'The Sick Textile Undertakings (Nationalisation) Act, 1974': 'the-sick-textile-undertakings-nationalisation-act-1974', 'The International Monetary Fund and Bank Act, 1945': 'the-international-monetary-fund-and-bank-act-1945', 'The Representation of the People (Amendment) Act, 1989': 'the-representation-of-the-people-amendment-act-1989', 'The Richardson and Cruddas Limited (Acquisition and Transfer of Undertaking) Act, 1972': 'the-richardson-and-cruddas-limited-acquisition-and-transfer-of-undertaking-act-1972', 'The Transformer and Switchgear Limited (Acquisition and Transfer of Undertakings) Act, 1983': 'the-transformer-and-switchgear-limited-acquisition-and-transfer-of-undertakings-act-1983', "The President's Emoluments and Pension Act, 1951": 'the-president-s-emoluments-and-pension-act-1951', 'The Pension Fund Regulatory and Development Authority Act, 2013': 'the-pension-fund-regulatory-and-development-authority-act-2013', 'The Indian Bar Councils Act, 1926': 'the-indian-bar-councils-act-1926', 'The Prize Competitions Act, 1955': 'the-prize-competitions-act-1955', 'The Births, Deaths and Marriages Registration Act, 1886': 'the-births-deaths-and-marriages-registration-act-1886', 'The Nalanda University Act, 2010': 
'the-nalanda-university-act-2010', 'The President (Discharge of Functions) Act, 1969': 'the-president-discharge-of-functions-act-1969', 'The National Service Act, 1972': 'the-national-service-act-1972', 'The Legal Services Authorities Act, 1987': 'the-legal-services-authorities-act-1987', 'The Indira Gandhi National Open University Act, 1985': 'the-indira-gandhi-national-open-university-act-1985', 'The Offshore Areas Mineral (Development and Regulation) Act, 2002': 'the-offshore-areas-mineral-development-and-regulation-act-2002', 'The National Security Guard Act, 1986': 'the-national-security-guard-act-1986', 'The Rani Lakshmi Bai Central Agricultural University Act, 2014': 'the-rani-lakshmi-bai-central-agricultural-university-act-2014', 'The Warehousing (Development and Regulation) Act, 2007': 'the-warehousing-development-and-regulation-act-2007', 'The Life Insurance Corporation Act, 1956': 'the-life-insurance-corporation-act-1956', 'The Railway Property (Unlawful Possession) Act, 1966': 'the-railway-property-unlawful-possession-act-1966', 'The Acquired Territories (Merger) Act, 1960': 'the-acquired-territories-merger-act-1960', 'The Ancient Monuments and Archaeological Sites and Remains Act, 1958': 'the-ancient-monuments-and-archaeological-sites-and-remains-act-1958', 'The Petroleum and Natural Gas Regulatory Board Act, 2006': 'the-petroleum-and-natural-gas-regulatory-board-act-2006', 'The The Preference Shares (Regulation of Dividends) Act, 1960': 'the-preference-shares-regulation-of-dividends-act-1960', 'The Special Marriage Act, 1954': 'the-special-marriage-act-1954', 'The United Nations (Privileges and Immunities) Act, 1947': 'the-united-nations-privileges-and-immunities-act-1947', 'The Faridabad Development Corporation Act, 1956': 'the-faridabad-development-corporation-act-1956', 'The Provisional Collection of Taxes Act, 1931': 'the-provisional-collection-of-taxes-act-1931', 'The Designs Act, 2000': 'the-designs-act-2000', 'The Rajiv Gandhi Institute of 
Petroleum Technology Act, 2007': 'the-rajiv-gandhi-institute-of-petroleum-technology-act-2007', 'The Multimodal Transportation of Goods Act, 1993': 'the-multimodal-transportation-of-goods-act-1993', 'The Coal Grading Board (Repeal) Act, 1959': 'the-coal-grading-board-repeal-act-1959', 'The Chit Funds Act, 1982': 'the-chit-funds-act-1982', 'The National Environment Appellate Authority Act, 1997': 'the-national-environment-appellate-authority-act-1997', 'The Territorial Waters, Continental Shelf, Exclusive Economic Zone and Other Maritime Zones Act, 1976': 'the-territorial-waters-continental-shelf-exclusive-economic-zone-and-other-maritime-zones-act-1976', 'The Passports Act, 1967': 'the-passports-act-1967', 'The Indian Partnership Act, 1932': 'the-indian-partnership-act-1932', 'The Clinical Establishments (Registration and Regulation) Act, 2010': 'the-clinical-establishments-registration-and-regulation-act-2010', 'The Sir Jamsetjee Jejeebhoy Baronetcy Act, 1915': 'the-sir-jamsetjee-jejeebhoy-baronetcy-act-1915', 'The Cantonments Act, 2006': 'the-cantonments-act-2006', 'The Central Reserve Police Force Act, 1949': 'the-central-reserve-police-force-act-1949', 'The New Delhi Municipal Council Act, 1994': 'the-new-delhi-municipal-council-act-1994', 'The The Hindu Marriage (Validation of Proceedings) Act, 1960': 'the-the-hindu-marriage-validation-of-proceedings-act-1960', 'The Indian Medicine Central Council Act, 1970': 'the-indian-medicine-central-council-act-1970', 'The Gujarat Legislative Assembly Members (Removal of Disqualifications) Act, 1960': 'the-gujarat-legislative-assembly-members-removal-of-disqualifications-act-1960', 'The Rajiv Gandhi National Institute of Youth Development Act, 2012': 'the-rajiv-gandhi-national-institute-of-youth-development-act-2012', 'The Central Industrial Security Force\xa0\xa0Act, 1968': 'the-central-industrial-security-force-act-1968', 'The Income-tax Act, 1961': 'the-income-tax-act-1961', 'The Rehabilitation Finance Administration 
Act, 1948': 'the-rehabilitation-finance-administration-act-1948', 'The Goa, Daman and Diu Mining Concessions (Abolition and Declaration as Mining Leases) Act, 1987': 'the-goa-daman-and-diu-mining-concessions-abolition-and-declaration-as-mining-leases-act-1987', 'The Scheduled Tribes and Other Traditional Forest Dwellers (Recognition of Forest Rights) Act, 2006': 'the-scheduled-tribes-and-other-traditional-forest-dwellers-recognition-of-forest-rights-act-2006', 'The Protection of Civil Rights Act, 1955': 'the-protection-of-civil-rights-act-1955', 'The Societies Registration Act, 1860': 'the-societies-registration-act-1860', 'The Indian Statistical Institute Act, 1959': 'the-indian-statistical-institute-act-1959', 'The Indira Gandhi National Tribal University Act, 2007': 'the-indira-gandhi-national-tribal-university-act-2007', 'The Employment of Manual Scavengers and Construction of Dry Latrines (Prohibition) Act, 1993': 'the-employment-of-manual-scavengers-and-construction-of-dry-latrines-prohibition-act-1993', 'The Capital of Punjab (Development and Regulation) (Chandigarh Amendment) Act, 1973': 'the-capital-of-punjab-development-and-regulation-chandigarh-amendment-act-1973', 'The Glanders and Farcy Act, 1899': 'the-glanders-and-farcy-act-1899', 'The Coal Mines Labour Welfare Fund (Repeal) Act, 1986': 'the-coal-mines-labour-welfare-fund-repeal-act-1986', 'The Haj Committee Act, 2002': 'the-haj-committee-act-2002', 'The Hotel-Receipts Tax Act, 1980': 'the-hotel-receipts-tax-act-1980', "The Married Women's Property Act, 1874": 'the-married-women-s-property-act-1874', 'The Ajmer Tenancy and Land Records Act, 1950': 'the-ajmer-tenancy-and-land-records-act-1950', 'The Armed Forces Tribunal Act, 2007': 'the-armed-forces-tribunal-act-2007', 'The National Capital Region Planning Board Act, 1985': 'the-national-capital-region-planning-board-act-1985', 'The Jawaharlal Nehru University Act, 1966': 'the-jawaharlal-nehru-university-act-1966', 'The Competition Act, 2002': 
'the-competition-act-2002', 'The Tokyo Convention Act, 1975': 'the-tokyo-convention-act-1975', 'The Iron Ore Mines, Manganese Ore Mines and Chrome Ore Mines Labour Welfare Cess Act, 1976': 'the-iron-ore-mines-manganese-ore-mines-and-chrome-ore-mines-labour-welfare-cess-act-1976', 'The Indian Museum Act, 1910': 'the-indian-museum-act-1910', 'The Unit Trust of India (Transfer of Undertaking and Repeal) Act, 2002': 'the-unit-trust-of-india-transfer-of-undertaking-and-repeal-act-2002', 'The Motor Transport Workers Act, 1961': 'the-motor-transport-workers-act-1961', 'The Provincial Insolvency Act, 1920': 'the-provincial-insolvency-act-1920', 'The Oil Industry (Development) Act, 1974': 'the-oil-industry-development-act-1974', 'The Road Transport Corporations Act, 1950': 'the-road-transport-corporations-act-1950', 'The Kalakshetra Foundation Act, 1993': 'the-kalakshetra-foundation-act-1993', 'The Public Employment (Requirement as to Residence) Amendment Act, 1964': 'the-public-employment-requirement-as-to-residence-amendment-act-1964', 'The Tea Companies (Acquisition and Transfer of Sick Tea Units) Act, 1985': 'the-tea-companies-acquisition-and-transfer-of-sick-tea-units-act-1985', 'The Bihar and West Bengal (Transfer of Territories) Act, 1956': 'the-bihar-and-west-bengal-transfer-of-territories-act-1956', 'The East Punjab Urban Rent Restriction (Chandigarh Amendment) Act, 1982': 'the-east-punjab-urban-rent-restriction-chandigarh-amendment-act-1982', 'The Air Force Act, 1950': 'the-air-force-act-1950', 'The Police-Forces (Restriction of Rights) Act, 1966': 'the-police-forces-restriction-of-rights-act-1966', 'The Petroleum and Minerals Pipelines (Acquisition of Right of User in Land) Act, 1962': 'the-petroleum-and-minerals-pipelines-acquisition-of-right-of-user-in-land-act-1962', 'The Indian Veterinary Council Act, 1984': 'the-indian-veterinary-council-act-1984', 'The Industrial Employmnet (Standing Orders) Act, 1946': 'the-industrial-employmnet-standing-orders-act-1946', 
'The National Green Tribunal Act, 2010': 'the-national-green-tribunal-act-2010', 'The Conservation of Foreign Exchange and Prevention of Smuggling Activities Act, 1974': 'the-conservation-of-foreign-exchange-and-prevention-of-smuggling-activities-act-1974', 'The Colonial Courts of Admiralty (India) Act, 1891': 'the-colonial-courts-of-admiralty-india-act-1891', 'The Assam Reorganisation (Meghalaya) Act, 1969': 'the-assam-reorganisation-meghalaya-act-1969', 'The Comptroller Auditor-General s (Duties, Powers and Conditions of Service) Act, 1971': 'the-comptroller-auditor-general-s-duties-powers-and-conditions-of-service-act-1971', 'The Equal Remuneration Act, 1976': 'the-equal-remuneration-act-1976', 'The Essential Commodities Act, 1955': 'the-essential-commodities-act-1955', 'The Central Agricultural University Act, 1992': 'the-central-agricultural-university-act-1992', 'The Khadi and other Handloom Industries Development (Additional Excise Duty on Cloth) Amendment Act, 1972': 'the-khadi-and-other-handloom-industries-development-additional-excise-duty-on-cloth-amendment-act-1972', 'The Haryana Backward Classes Commission Act, 2016': 'the-haryana-backward-classes-commission-act-2016', 'The Delhi Special Police Establishment Act, 1946': 'the-delhi-special-police-establishment-act-1946', 'The Railways (Employment of Members of the Armed Forces) Act, 1965': 'the-railways-employment-of-members-of-the-armed-forces-act-1965', 'The Scheduled Castes and Scheduled Tribes Orders (Amendment) Act, 1976': 'the-scheduled-castes-and-scheduled-tribes-orders-amendment-act-1976', 'The Diplomatic and Consular Officers (Oaths and Fees) (Extension to Jammu and Kashmir) Act, 1973': 'the-diplomatic-and-consular-officers-oaths-and-fees-extension-to-jammu-and-kashmir-act-1973', 'The Railway Protection Force Act, 1957': 'the-railway-protection-force-act-1957', 'The Banking Companies (Acquisition and Transfer of Undertakings) Act, 1970': 
'the-banking-companies-acquisition-and-transfer-of-undertakings-act-1970', 'The Manipur Land Revenue and Land Reforms Act, 1960': 'the-manipur-land-revenue-and-land-reforms-act-1960', 'The Pondicherry (Alteration of Name) Act, 2006': 'the-pondicherry-alteration-of-name-act-2006', 'The Coasting vessels Act, 1838': 'the-coasting-vessels-act-1838', 'The Jammu and Kashmir Representation of the People Act 1957 (4 of 1957)': 'the-jammu-and-kashmir-representation-of-the-people-act-1957-4-of-1957', 'The Right of Children to Free and Compulsory Education Act, 2009': 'the-right-of-children-to-free-and-compulsory-education-act-2009', 'The Delhi and Ajmer Rent Control Act, 1952': 'the-delhi-and-ajmer-rent-control-act-1952', 'The Life Insurance Corporation (Modification of Settlements) Act, 1976': 'the-life-insurance-corporation-modification-of-settlements-act-1976', 'The Fetal Accidents Act, 1855': 'the-fetal-accidents-act-1855', "The Advocates' Fund Act, 2001": 'the-advocates-fund-act-2001', 'The Central Educational Institutions (Reservation in Admission) Act, 2006': 'the-central-educational-institutions-reservation-in-admission-act-2006', 'The Religious Societies Act, 1880': 'the-religious-societies-act-1880', 'The Prize Chits and Money Circulation Schemes (Banning) Act, 1978': 'the-prize-chits-and-money-circulation-schemes-banning-act-1978', 'The Protection of Children from Sexual Offences Act, 2012': 'the-protection-of-children-from-sexual-offences-act-2012', 'The Punjab Reorganisation Act, 1966': 'the-punjab-reorganisation-act-1966', 'The Indian Naval Armament Act, 1923': 'the-indian-naval-armament-act-1923', 'The Aligarh Muslim University Act, 1920': 'the-aligarh-muslim-university-act-1920', 'The Muslim Women (Protection of Rights on Divorce) Act, 1986': 'the-muslim-women-protection-of-rights-on-divorce-act-1986', 'The Himachal Pradesh and Bilaspur (New State) Act, 1954': 'the-himachal-pradesh-and-bilaspur-new-state-act-1954', 'The Lady Hardinge Medical College and 
Hospital (Acquisition and Miscellaneous Provisions) Act, 1977': 'the-lady-hardinge-medical-college-and-hospital-acquisition-and-miscellaneous-provisions-act-1977', 'The Religious endowments act, 1863': 'the-religious-endowments-act-1863', 'The All India Council for Technical Education Act, 1987': 'the-all-india-council-for-technical-education-act-1987', 'The National Jute Board Act, 2008': 'the-national-jute-board-act-2008', 'The Protection of Plant Varieties and Farmers Rights Act, 2001': 'the-protection-of-plant-varieties-and-farmers-rights-act-2001', 'The Durgah khawaja Saheb Act, 1955': 'the-durgah-khawaja-saheb-act-1955', 'The Indian Institutes of Information Technology (Public-private Partnership) Act, 2017': 'the-indian-institutes-of-information-technology-public-private-partnership-act-2017', 'The Indian Evidence Act, 1872': 'the-indian-evidence-act-1872', 'The Delhi Land Reforms (Amendment) Act, 1966': 'the-delhi-land-reforms-amendment-act-1966', 'The Mahatma Gandhi Antarrashtriya Hindi Vishwavidyalaya Act, 1996': 'the-mahatma-gandhi-antarrashtriya-hindi-vishwavidyalaya-act-1996', 'The Factoring Regulation Act, 2011': 'the-factoring-regulation-act-2011', 'The Rights of Persons with Disabilities Act, 2016.': 'the-rights-of-persons-with-disabilities-act-2016', 'The Indian Tolls Act, 1864': 'the-indian-tolls-act-1864', 'The The Railway Companies (Emergency Provisions) Act, 1951': 'the-the-railway-companies-emergency-provisions-act-1951', 'The Commissions for Protection of Child Rights Act, 2005': 'the-commissions-for-protection-of-child-rights-act-2005', 'The Judicial Officers Protection Act, 1850': 'the-judicial-officers-protection-act-1850', 'The Central Goods and Services Tax Act, 2017': 'the-central-goods-and-services-tax-act-2017', 'The University Grants Commission Act, 1956': 'the-university-grants-commission-act-1956', 'The Prohibition of Employment as Manual Scavengers and their Rehabilitation Act, 2013': 
'the-prohibition-of-employment-as-manual-scavengers-and-their-rehabilitation-act-2013', 'The Defence of India Act, 1962': 'the-defence-of-india-act-1962', 'The Information Technology Act, 2000': 'the-information-technology-act-2000', 'The Beedi and Cigar Workers (Conditions of Employment) Act, 1966': 'the-beedi-and-cigar-workers-conditions-of-employment-act-1966', 'The Legal Representatives Suits Act, 1855': 'the-legal-representatives-suits-act-1855', 'The Carriage by Air Act, 1972': 'the-carriage-by-air-act-1972', 'The Ancient Monuments Preservation Act, 1904': 'the-ancient-monuments-preservation-act-1904', 'The Bihar Value Added Tax Act, 2005': 'the-bihar-value-added-tax-act-2005', 'The Works of Defence Act, 1903': 'the-works-of-defence-act-1903', 'The Metro Railways (Construction of Works) Amendment Act, 1982': 'the-metro-railways-construction-of-works-amendment-act-1982', 'The Gurugram Metropolitan Development Authority Act, 2017 (Haryana Act No.34 of 2017)': 'the-gurugram-metropolitan-development-authority-act-2017-haryana-act-no-34-of-2017', 'The National Institute of Pharmaceutical Education and Research Act, 1998': 'the-national-institute-of-pharmaceutical-education-and-research-act-1998', 'The Energy Conservation Act, 2001': 'the-energy-conservation-act-2001', 'The Delhi High Court (Amendment) Act, 1980': 'the-delhi-high-court-amendment-act-1980', 'The Banking Companies (Second Amendment) Act, 1960': 'the-banking-companies-second-amendment-act-1960', 'The Destruction of Records Act, 1917': 'the-destruction-of-records-act-1917', 'The Civil Defence Act, 1968': 'the-civil-defence-act-1968', 'The Air Corporations (Transfer of Undertakings and Repeal) Act, 1994': 'the-air-corporations-transfer-of-undertakings-and-repeal-act-1994', 'The Regional Centre for Biotechnology Act, 2016.': 'the-regional-centre-for-biotechnology-act-2016', 'The Women s and Children s Institutions (Licensing) Act, 1956': 'the-women-s-and-children-s-institutions-licensing-act-1956', 'The 
Asiatic Society Act, 1984': 'the-asiatic-society-act-1984', 'The African Development Bank Act, 1983': 'the-african-development-bank-act-1983', 'The Immigration (carriers Liability) Act, 2000': 'the-immigration-carriers-liability-act-2000', 'The Indian Trust Act, 1882': 'the-indian-trust-act-1882', 'The Disaster Management Act, 2005': 'the-disaster-management-act-2005', 'The Salary and Allowances of Leaders of Opposition in Parliament Act, 1977': 'the-salary-and-allowances-of-leaders-of-opposition-in-parliament-act-1977', 'The Maintenance Orders Enforcement Act, 1921': 'the-maintenance-orders-enforcement-act-1921', 'The Specified Bank Notes (Cessation of Liabilities) Act, 2017': 'the-specified-bank-notes-cessation-of-liabilities-act-2017', 'The Displaced Persons (Compensation and Rehabilitation) Amendment Act, 1960': 'the-displaced-persons-compensation-and-rehabilitation-amendment-act-1960', 'The Small Industries Development Bank of India Act, 1989': 'the-small-industries-development-bank-of-india-act-1989', 'The Depositories Act, 1996': 'the-depositories-act-1996', 'The Election Laws (Extension to Sikkim) Act, 1976': 'the-election-laws-extension-to-sikkim-act-1976', 'The Hindu Adoptions and Maintenance Act, 1956': 'the-hindu-adoptions-and-maintenance-act-1956', 'The Private Security Agencies (Regulation) Act, 2005': 'the-private-security-agencies-regulation-act-2005', 'The Government Savings Certificates Act, 1959': 'the-government-savings-certificates-act-1959', 'The Indian Council of World Affairs Act, 2001': 'the-indian-council-of-world-affairs-act-2001', 'The Cantonments (House Accommodation) Act, 1923': 'the-cantonments-house-accommodation-act-1923', 'The Jammu and Kashmir Representation of the People (Supplementary) Act, 1968 (3 of 1968)': 'the-jammu-and-kashmir-representation-of-the-people-supplementary-act-1968-3-of-1968', 'The Smith, Stanistreet and Company Limited (Acquisition and Transfer of Undertakings) Act, 1977': 
'the-smith-stanistreet-and-company-limited-acquisition-and-transfer-of-undertakings-act-1977', 'The Delhi Land Holdings (Ceiling) Act, 1960': 'the-delhi-land-holdings-ceiling-act-1960', 'The Government Managemnet of Private Estates Act, 1892': 'the-government-managemnet-of-private-estates-act-1892', 'The Haryana Registration and Regulation of Societies Act, 2012': 'the-haryana-registration-and-regulation-of-societies-act-2012', 'The Indian Divorce Act, 1869': 'the-indian-divorce-act-1869', 'The Sugarcane Act, 1934': 'the-sugarcane-act-1934', 'The Credit Information Companies (Regulation) Act, 2005.': 'the-credit-information-companies-regulation-act-2005', 'The Tobacco Board Act, 1975': 'the-tobacco-board-act-1975', 'The Dock Workers (Regulation of Employment) Amendment Act, 1980': 'the-dock-workers-regulation-of-employment-amendment-act-1980', 'The Provincial Small Cause Courts Act, 1887': 'the-provincial-small-cause-courts-act-1887', 'The Administrative Tribunals Act, 1985': 'the-administrative-tribunals-act-1985', 'The Indian Red Cross Society Act, 1920': 'the-indian-red-cross-society-act-1920', 'The Foreign Aircraft (Exemption from Taxes and Duties on Fuel and Lubricants) Act, 2002': 'the-foreign-aircraft-exemption-from-taxes-and-duties-on-fuel-and-lubricants-act-2002', 'The State Bank of Sikkim (Acquisition of Shares) and Miscellaneous Provisions Act, 1982': 'the-state-bank-of-sikkim-acquisition-of-shares-and-miscellaneous-provisions-act-1982', 'The HARYANA NURSES AND NURSE-MIDWIVES ACT, 2017 (ACT NO.3 OF 2017).': 'the-haryana-nurses-and-nurse-midwives-act-2017-act-no-3-of-2017', 'The Hind Cycles Limited and Sen-Raleigh Limited (Nationalisation) Act, 1980': 'the-hind-cycles-limited-and-sen-raleigh-limited-nationalisation-act-1980', 'The Foreigners Law (Application and Amendment) Act, 1962': 'the-foreigners-law-application-and-amendment-act-1962', 'The Lok Sahayak Sena Act, 1956': 'the-lok-sahayak-sena-act-1956', 'The Repealing and Amending (Second) Act': 
'the-repealing-and-amending-second-act', 'The Destructive Insects and Pests Act, 1914': 'the-destructive-insects-and-pests-act-1914', 'The Arbitration and Conciliation Act, 1996': 'the-arbitration-and-conciliation-act-1996', 'The Delhi University Act, 1922': 'the-delhi-university-act-1922', 'The Representation of the People (Amendment) Act, 1966': 'the-representation-of-the-people-amendment-act-1966', 'The Industries (Development and Regulation) Amendment Act, 1984': 'the-industries-development-and-regulation-amendment-act-1984', 'The Reserve Bank (Transfer to Public Ownership) Act, 1948': 'the-reserve-bank-transfer-to-public-ownership-act-1948', 'The Haryana Road Infrastructure Protection Act, 2017': 'the-haryana-road-infrastructure-protection-act-2017', 'The Coal India (Regulation of Transfers and Validation) Act, 2000': 'the-coal-india-regulation-of-transfers-and-validation-act-2000', 'The Indian Christian Marriage Act, 1872': 'the-indian-christian-marriage-act-1872', 'The Hindu Marriage Act, 1955': 'the-hindu-marriage-act-1955', 'The Family Courts Act, 1984': 'the-family-courts-act-1984', 'The Assam Rifles Act, 1941': 'the-assam-rifles-act-1941', 'The Betwa River Board Act, 1976': 'the-betwa-river-board-act-1976', 'The Semiconductor Integrated Circuits Layout-Design Act, 2000': 'the-semiconductor-integrated-circuits-layout-design-act-2000', 'The Insurance Act, 1938': 'the-insurance-act-1938', 'The National Institute of Design Act, 2014': 'the-national-institute-of-design-act-2014', 'The Government Buildings Act, 1899': 'the-government-buildings-act-1899', 'The Code of Civil Procedure Act, 1908': 'the-code-of-civil-procedure-act-1908', 'The Industrial Disputes Act, 1947': 'the-industrial-disputes-act-1947', 'The Cigarettes and Other Tobacco Products (Prohibition of Advertisement and Regulation of Trade and Commerce, Production, Supply and Distribution) Act, 2003': 
'the-cigarettes-and-other-tobacco-products-prohibition-of-advertisement-and-regulation-of-trade-and-commerce-product', 'The Authoritative Texts (Central Laws) Act, 1973': 'the-authoritative-texts-central-laws-act-1973', 'The Sashastra Seema Bal Act, 2007': 'the-sashastra-seema-bal-act-2007', 'The Oil and Natural Gas Commission (Transfer of Undertaking and Repeal) Act, 1993': 'the-oil-and-natural-gas-commission-transfer-of-undertaking-and-repeal-act-1993', 'The Explosives Act, 1884': 'the-explosives-act-1884', 'The Central Excise Act 1944': 'the-central-excises-act-1944', 'The Levy Sugar Price Equalisation Fund Act, 1976': 'the-levy-sugar-price-equalisation-fund-act-1976', 'The Food Corporations Act, 1964': 'the-food-corporations-act-1964', 'The All-India Services Act, 1951': 'the-all-india-services-act-1951', 'The Mines and Minerals (Development and Regulation) Act, 1957': 'the-mines-and-minerals-development-and-regulation-act-1957', 'The Food Safety and Standards Act, 2006': 'the-food-safety-and-standards-act-2006', 'The Presidential and Vice-Presidential Elections Act, 1952': 'the-presidential-and-vice-presidential-elections-act-1952', 'The Victoria Memorial Act, 1903': 'the-victoria-memorial-act-1903', 'The Mineral Oils (Additional Duties of Excise and Customs) Amendment Act, 1959': 'the-mineral-oils-additional-duties-of-excise-and-customs-amendment-act-1959', 'The Bombay Civil Courts Act, 1869': 'the-bombay-civil-courts-act-1869', 'The Delhi Lands (Restrictions on Transfer) Act, 1972': 'the-delhi-lands-restrictions-on-transfer-act-1972', 'The Cable Television Networks (Regulation) Act, 1995': 'the-cable-television-networks-regulation-act-1995', 'The Rubber Act, 1947': 'the-rubber-act-1947', 'The Delhi Restriction of Uses of Land Act, 1941': 'the-delhi-restriction-of-uses-of-land-act-1941', 'The Copyright Act, 1957': 'the-copyright-act-1957', 'The Orissa Weights and Measures (Delhi Repeal) Act, 1958': 'the-orissa-weights-and-measures-delhi-repeal-act-1958', 'The 
National Commission for Minorities Act, 1992': 'the-national-commission-for-minorities-act-1992', 'The Integrated Goods and Services Tax (Extension to Jammu and Kashmir) Act, 2017': 'the-integrated-goods-and-services-tax-extension-to-jammu-and-kashmir-act-2017', 'The Indo-Tibetan Border Police Force Act, 1992': 'the-indo-tibetan-border-police-force-act-1992', 'The Fiscal Responsibility and Budget Management Act, 2003': 'the-fiscal-responsibility-and-budget-management-act-2003', 'The Immoral Traffic (Prevention) Act, 1956': 'the-immoral-traffic-prevention-act-1956', 'The Code of Criminal Procedure (Amendment) Act, 1980': 'the-code-of-criminal-procedure-amendment-act-1980', 'The Indian Nursing Council Act, 1947': 'the-indian-nursing-council-act-1947', 'The Identification of Prisoners Act, 1920': 'the-identification-of-prisoners-act-1920', 'The Indian Boilers Act, 1923': 'the-indian-boilers-act-1923', 'The Armed Forces (Emergency Duties) Act, 1947': 'the-armed-forces-emergency-duties-act-1947', 'The Marking of Heavy Packages Act, 1951': 'the-marking-of-heavy-packages-act-1951', 'The Central Silk Board Act, 1948': 'the-central-silk-board-act-1948', 'The Scheduled Castes and the Scheduled Tribes (Prevention of Atrocities) Act, 1989': 'the-scheduled-castes-and-the-scheduled-tribes-prevention-of-atrocities-act-1989', 'The Insolvency and Bankruptcy Code, 2016': 'the-insolvency-and-bankruptcy-code-2016', 'The Central Goods and Services Tax (Extension to Jammu and Kashmir) Act, 2017': 'the-central-goods-and-services-tax-extension-to-jammu-and-kashmir-act-2017', 'The Uttaranchal (Alteration of Name) Act, 2006': 'the-uttaranchal-alteration-of-name-act-2006', 'The Metro Railways (Operation and Maintenance) Act, 2002': 'the-metro-railways-operation-and-maintenance-act-2002', 'The Jawaharlal Institute of Post-Graduate Medical Education and Research, Puducherry, Act, 2008': 'the-jawaharlal-institute-of-post-graduate-medical-education-and-research-puducherry-act-2008', 'The United 
Nations (Security Council) Act, 1947': 'the-united-nations-security-council-act-1947', 'The Maulana Azad National Urdu University Act, 1996': 'the-maulana-azad-national-urdu-university-act-1996', 'The Standards of Weights and Measures (Extension to Kohima and Mokokchung Districts) Act, 1967': 'the-standards-of-weights-and-measures-extension-to-kohima-and-mokokchung-districts-act-1967', 'The Indian Medical Council Act, 1956': 'the-indian-medical-council-act-1956', 'The State Financial Corporations Act, 1951': 'the-state-financial-corporations-act-1951', 'The Orphanages and other Charitable Homes (Supervision and Control) Act, 1960': 'the-orphanages-and-other-charitable-homes-supervision-and-control-act-1960', 'The State of Nagaland Act, 1962': 'the-state-of-nagaland-act-1962', 'The Administrative Tribunals (Amendment) Act, 1986': 'the-administrative-tribunals-amendment-act-1986', 'The Official Trusteers Act, 1913': 'the-official-trusteers-act-1913', 'The National Waterways Act, 2016.': 'the-national-waterways-act-2016', 'The Delhi Fire Prevention and Fire Safety Act, 1986': 'the-delhi-fire-prevention-and-fire-safety-act-1986', 'The Inland Vessels Act, 1917': 'the-inland-vessels-act-1917', 'The Delhi Motor Vehicles Taxation Act, 1962': 'the-delhi-motor-vehicles-taxation-act-1962', 'The National Capital Territory of Delhi Laws (Special Provisions) Act, 2011': 'the-national-capital-territory-of-delhi-laws-special-provisions-act-2011', 'The Government of National Capital Territory Act, 1991 (1 of 1992)': 'the-government-of-national-capital-territory-act-1991-1-of-1992', 'The Sexual Harassment of Women at Workplace (Prevention, Prohibition and Redressal) Act, 2013': 'the-sexual-harassment-of-women-at-workplace-prevention-prohibition-and-redressal-act-2013', 'The Municipal Taxation Act, 1881': 'the-municipal-taxation-act-1881', 'The Textile Undertakings (Nationalisation) Act, 1995': 'the-textile-undertakings-nationalisation-act-1995', 'The Indira Gandhi University Meerpur 
Act 2013': 'the-indira-gandhi-university-meerpur-act-2013', 'The Indian Easements Act, 1882': 'the-indian-easements-act-1882', 'The Destructive Insects and Pests (Amendment and Validation) Act, 1992': 'the-destructive-insects-and-pests-amendment-and-validation-act-1992', 'The Delhi Laws (Special Provisions) Act, 2006': 'the-delhi-laws-special-provisions-act-2006', 'The Calcutta High Court (Jurisdictional Limits) Act, 1919': 'the-calcutta-high-court-jurisdictional-limits-act-1919', 'The British India Corporation Limited (Acquisition of Shares) Act, 1981': 'the-british-india-corporation-limited-acquisition-of-shares-act-1981', 'The Delhi and Ajmer-Merwara Land Development Act, 1948': 'the-delhi-and-ajmer-merwara-land-development-act-1948', 'The Employees Provident Funds and Miscellaneous Provisions Act, 1952': 'the-employees-provident-funds-and-miscellaneous-provisions-act-1952', 'The Payment of Bonus Act, 1965': 'the-payment-of-bonus-act-1965', 'The Hindu Minority and Guardianship\xa0Act, 1956': 'the-hindu-minority-and-guardianship-act-1956', 'The Public Premises (Eviction of Unauthorised Occupants) Act, 1971': 'the-public-premises-eviction-of-unauthorised-occupants-act-1971', 'The Khuda Bakhsh Oriental Public Library Act, 1969': 'the-khuda-bakhsh-oriental-public-library-act-1969', 'The Limited Liability Partnership Act, 2008': 'the-limited-liability-partnership-act-2008', 'The Remittances of Foreign Exchange and Investment in Foreign Exchange Bonds (Immunities and Exemptions) Act, 1991': 'the-remittances-of-foreign-exchange-and-investment-in-foreign-exchange-bonds-immunities-and-exemptions-act-1991', 'The Coal Bearing Areas (Acquisition and Development) Act, 1957': 'the-coal-bearing-areas-acquisition-and-development-act-1957', 'The Factories Act, 1948': 'the-factories-act-1948', 'The Actuaries Act, 2006': 'the-actuaries-act-2006', "The Gujarat Legislative Assembly Members' Salaries and Allowances Act, 1960": 
'the-gujarat-legislative-assembly-members-salaries-and-allowances-act-1960', 'The Indian Railways (Amendment) Act, 1983': 'the-indian-railways-amendment-act-1983', 'The Haryana Law officers (Engagement) Act, 2016': 'the-aadhaar-targeted-delivery-of-financial-and-other-subsidies-benefits-and-services-act-2016', 'The Central Excises and Salt and Additional Duties of Excise (Amendment) Act, 1980': 'the-central-excises-and-salt-and-additional-duties-of-excise-amendment-act-1980', 'The Dentists Act, 1948': 'the-dentists-act-1948', 'The Criminal Law Amendment (Amending) Act, 1966': 'the-criminal-law-amendment-amending-act-1966', 'The Public Financial Institutions (Obligation as to Fidelity and Secrecy) Act, 1983': 'the-public-financial-institutions-obligation-as-to-fidelity-and-secrecy-act-1983', 'The Joint-stock Companies Act, 1857': 'the-joint-stock-companies-act-1857', 'The Reserve and Auxiliary Air Forces Act, 1952': 'the-reserve-and-auxiliary-air-forces-act-1952', 'The Cutchi Memons Act, 1938': 'the-cutchi-memons-act-1938', 'The Protection of Human Rights Act, 1993': 'the-protection-of-human-rights-act-1993', 'The Indian Securities Act, 1920': 'the-indian-securities-act-1920', 'The National Capital Territory of Delhi Laws (Special Provisions) Second Act, 2011': 'the-national-capital-territory-of-delhi-laws-special-provisions-second-act-2011', 'The Economic Offences (Inapplicability of Limitation)\xa0\xa0Act, 1974': 'the-economic-offences-inapplicability-of-limitation-act-1974', 'The Banking Companies (Acquisition and Transfer of Undertakings) Act, 1980': 'the-banking-companies-acquisition-and-transfer-of-undertakings-act-1980', 'The Indian Tolls (Army and Air Force) Act, 1901': 'the-indian-tolls-army-and-air-force-act-1901', 'The Provident Funds Act, 1925': 'the-provident-funds-act-1925', 'The Haryana Goods and Services Tax Act, 2017 (Act No.19 of 2017)': 'the-haryana-goods-and-services-tax-act-2017-act-no-19-of-2017', 'The South Asian University Act, 2008': 
'the-south-asian-university-act-2008', 'The Admiralty (Jurisdiction and Settlement of Maritime Claims) Act, 2017': 'the-admiralty-jurisdiction-and-settlement-of-maritime-claims-act-2017', 'The Maintenance and Welfare of Parents and Senior Citizens Act, 2007': 'the-maintenance-and-welfare-of-parents-and-senior-citizens-act-2007', 'The Scheduled Areas (Assimilation of Laws) Act, 1953': 'the-scheduled-areas-assimilation-of-laws-act-1953', 'The Judges (Inquiry) Act, 1968': 'the-judges-inquiry-act-1968', 'The Calcutta Port (Pilotage)\xa0Act, 1948': 'the-calcutta-port-pilotage-act-1948', 'The Specific Relief Act, 1963': 'the-specific-relief-act-1963', 'The Aligarh Muslim University (Amendment) Act, 1981': 'the-aligarh-muslim-university-amendment-act-1981', 'The Visva-Bharati (Amendment) Act, 1984': 'the-visva-bharati-amendment-act-1984', 'The Jammu and Kashmir (Extension of Laws) Act, 1956': 'the-jammu-and-kashmir-extension-of-laws-act-1956', 'The Punjab District Boards Act, 1883': 'the-punjab-district-boards-act-1883', 'The Calcutta High Court (Extension of Jurisdiction) Act, 1953': 'the-calcutta-high-court-extension-of-jurisdiction-act-1953', 'The Taxation Laws (Continuation and Validation of Recovery Proceedings) Act, 1964': 'the-taxation-laws-continuation-and-validation-of-recovery-proceedings-act-1964', 'The Swadeshi Cotton Mills Company Limited (Acquisition and Transfer of Undertakings) Act, 1986': 'the-swadeshi-cotton-mills-company-limited-acquisition-and-transfer-of-undertakings-act-1986', 'The Merchant Shipping (Amendment) Act, 1986': 'the-merchant-shipping-amendment-act-1986', 'The Special Criminal Courts (Jurisdiction) Act, 1950': 'the-special-criminal-courts-jurisdiction-act-1950', 'The Enemy Property Act, 1968': 'the-enemy-property-act-1968', 'The Revenue Recovery Act, 1890': 'the-revenue-recovery-act-1890', 'The Presidency Small Cause Courts Act, 1882': 'the-presidency-small-cause-courts-act-1882', 'The Aligarh Muslim University (Amendment) Act, 1972': 
'the-aligarh-muslim-university-amendment-act-1972', 'The Street Vendors (Protection of Livelihood and Regulation of Street Vending) Act, 2014': 'the-street-vendors-protection-of-livelihood-and-regulation-of-street-vending-act-2014', 'The Companies Act, 2013': 'the-companies-act-2013', 'The Repatriation of Prisoners Act, 2003': 'the-repatriation-of-prisoners-act-2003', 'The Punjab Gram Panchayat, Samities and Zilla Parishad (Chandigarh Repeal) Act, 1994': 'the-punjab-gram-panchayat-samities-and-zilla-parishad-chandigarh-repeal-act-1994', 'The Oriental Gas Company Act, 1857': 'the-oriental-gas-company-act-1857', 'The Tea Act, 1953': 'the-tea-act-1953', 'The Black Money (Undisclosed Foreign Income and Assets) and Imposition of Tax Act, 2015': 'the-black-money-undisclosed-foreign-income-and-assets-and-imposition-of-tax-act-2015', 'The Bureau of Indian Standards Act, 2016.': 'the-bureau-of-indian-standards-act-2016', 'The Manipur Municipalities Act, 1994': 'the-manipur-municipalities-act-1994', 'The Indian Soldiers (Litigation) Act, 1925': 'the-indian-soldiers-litigation-act-1925', 'The Environment (Protection) Act, 1986': 'the-environment-protection-act-1986', 'The Exchange of Prisoners\xa0Act, 1948': 'the-exchange-of-prisoners-act-1948', 'The Central Universities Act, 2009': 'the-central-universities-act-2009', 'The Securities and Exchange Board of India Act, 1992': 'the-securities-and-exchange-board-of-india-act-1992', 'The Union Territories (Direct Election to the House of the People) Act, 1965': 'the-union-territories-direct-election-to-the-house-of-the-people-act-1965', 'The Tripura University Act, 2006': 'the-tripura-university-act-2006', 'The Building and Other Construction Workers Welfare Cess Act, 1996': 'the-building-and-other-construction-workers-welfare-cess-act-1996', 'The Administrators-General Act, 1963': 'the-administrators-general-act-1963', 'The Delhi Rent Control Act, 1995': 'the-delhi-rent-act-1995', 'The Official Secrets Act, 1923': 
'the-official-secrets-act-1923', 'The Commission of Sati (Prevention ) Act, 1987': 'the-commission-of-sati-prevention-act-1987', 'The Foreign Marriage Act, 1969': 'the-foreign-marriage-act-1969', 'The National Commission for Safai Karamcharis Act, 1993': 'the-national-commission-for-safai-karamcharis-act-1993', 'The National Commission for Backward Classes Act, 1993': 'the-national-commission-for-backward-classes-act-1993', 'The Pondicherry University Act, 1985': 'the-pondicherry-university-act-1985', 'The Industrial Disputes (Banking and Insurance Companies) Act, 1949': 'the-industrial-disputes-banking-and-insurance-companies-act-1949', 'The Bhopal Gas Leak Disaster (Processing of Claims) Act, 1985': 'the-bhopal-gas-leak-disaster-processing-of-claims-act-1985', 'The National Capital Territory of Delhi Laws (Special Provisions) Act, 2009': 'the-national-capital-territory-of-delhi-laws-special-provisions-act-2009', 'The Code of Criminal Procedure Act, 1973': 'the-code-of-criminal-procedure-act-1973', 'The Forest (Conservation) Act, 1980': 'the-forest-conservation-act-1980', 'The Cardamom Act, 1965': 'the-cardamom-act-1965', 'The Shillong (Rifle Range and Umlong) Cantonments Assimilation of Laws Act, 1954': 'the-shillong-rifle-range-and-umlong-cantonments-assimilation-of-laws-act-1954', 'The Electricity Act, 2003': 'the-electricity-act-2003', 'The Uttar Pradesh Reorganisation Act, 2000': 'the-uttar-pradesh-reorganisation-act-2000', 'The Prevention of Illicit Traffic in Narcotic Drugs and Psychotropic Substances Act, 1988': 'the-prevention-of-illicit-traffic-in-narcotic-drugs-and-psychotropic-substances-act-1988', 'The Atomic Energy Act, 1962': 'the-atomic-energy-act-1962', 'The Vice-President s Pension Act, 1997': 'the-vice-president-s-pension-act-1997', 'The Inter-State Migrant Workmen (Regulation of Employment and Conditions of Service) Act, 1979': 'the-inter-state-migrant-workmen-regulation-of-employment-and-conditions-of-service-act-1979', 'The Army and Air Force 
(Disposal of Private Property) Act, 1950': 'the-army-and-air-force-disposal-of-private-property-act-1950', 'The Employees State Insurance Act, 1948': 'the-employees-state-insurance-act-1948', 'The Assam Rifles Act, 2006': 'the-assam-rifles-act-2006', 'The Murshidabad Estate Administration Act, 1933': 'the-murshidabad-estate-administration-act-1933', 'The Bureau of Indian Standards Act, 1986': 'the-bureau-of-indian-standards-act-1986', 'The Employment Exchanges (Compulsory Notification of Vacancies) Act, 1959': 'the-employment-exchanges-compulsory-notification-of-vacancies-act-1959', 'The Cess and Other Taxes on Minerals (Validation) Act, 1992': 'the-cess-and-other-taxes-on-minerals-validation-act-1992', 'The Easements (Extending Act 5 of 1882), 1891': 'the-easements-extending-act-5-of-1882-1891', 'The Major Port Trusts Act, 1963': 'the-major-port-trusts-act-1963', 'The White Phosphorus Matches Prohibition Act, 1913': 'the-white-phosphorus-matches-prohibition-act-1913', 'The Lotteries (Regulation) Act, 1998': 'the-lotteries-regulation-act-1998', 'The Prevention and Control of Infectious and Contagious Diseases in Animals Act, 2009': 'the-prevention-and-control-of-infectious-and-contagious-diseases-in-animals-act-2009', 'The Contempt of Courts Act, 1971': 'the-contempt-of-courts-act-1971', 'The Payment of Wages Act, 1936': 'the-payment-of-wages-act-1936', 'The State of Arunachal Pradesh Act, 1986': 'the-state-of-arunachal-pradesh-act-1986', 'The Interest Act, 1978': 'the-interest-act-1978', 'The Orissa (Alteration of Name) Act, 2011': 'the-orissa-alteration-of-name-act-2011', 'The Disturbed Areas (Special Courts) Act, 1976': 'the-disturbed-areas-special-courts-act-1976', 'The Sree Chitra Tirunal Institute for Medical Sciences and Technology, Trivandrum Act, 1980': 'the-sree-chitra-tirunal-institute-for-medical-sciences-and-technology-trivandrum-act-1980', 'The Geneva Conventions Act, 1960': 'the-geneva-conventions-act-1960', 'The Deposit Insurance and Credit 
Guarantee Corporation Act, 1961': 'the-deposit-insurance-and-credit-guarantee-corporation-act-1961', 'The Aircraft Act, 1934': 'the-aircraft-act-1934', 'The Companies (Profits) Surtax Act, 1964': 'the-companies-profits-surtax-act-1964', 'The Census Act, 1948': 'the-census-act-1948', 'The Advocates (Amendment) Act, 1976': 'the-advocates-amendment-act-1976', 'The Gujarat Legislative Assembly (Speaker and Deputy Speaker) Salaries and Allowances Act, 1960.': 'the-gujarat-legislative-assembly-speaker-and-deputy-speaker-salaries-and-allowances-act-1960', 'The River Boards Act, 1956': 'the-river-boards-act-1956', 'The Oriental Gas Company Act, 1867': 'the-oriental-gas-company-act-1867', 'The Indian Contract Act, 1872': 'the-indian-contract-act-1872', 'The Passport (Entry into India) Act, 1920': 'the-passport-entry-into-india-act-1920', 'The Jute Manufactures Development Council Act, 1983': 'the-jute-manufactures-development-council-act-1983', 'The Maternity Benefit Act, 1961': 'the-maternity-benefit-act-1961', 'The Negotiable Instruments Act, 1881': 'the-negotiable-instruments-act-1881', 'The Delhi School Education Act, 1973': 'the-delhi-school-education-act-1973', 'The Coconut Development Board Act, 1979': 'the-coconut-development-board-act-1979', 'The Punjab Disturbed Areas Act, 1983': 'the-punjab-disturbed-areas-act-1983', 'The Civil Liability for Nuclear Damage Act, 2010': 'the-civil-liability-for-nuclear-damage-act-2010', 'The Kazis Act, 1880': 'the-kazis-act-1880', 'The Andhra State Act, 1953': 'the-andhra-state-act-1953', 'The Coal Mines (Special Provisions) Act, 2015': 'the-coal-mines-special-provisions-act-2015', 'The Gram Nyayalayas Act, 2008': 'the-gram-nyayalayas-act-2008', 'The Suppression of Unlawful Acts against Safety of Civil Aviation Act, 1982': 'the-suppression-of-unlawful-acts-against-safety-of-civil-aviation-act-1982', 'The Plantations Labour Act, 1951': 'the-plantations-labour-act-1951', 'The Foreigners Act, 1946': 'the-foreigners-act-1946', 'The 
Rajiv Gandhi University Act, 2006': 'the-rajiv-gandhi-university-act-2006', 'The Assam (Alteration of Boundaries) Act, 1951': 'the-assam-alteration-of-boundaries-act-1951', 'The Marine Insurance Act, 1963': 'the-marine-insurance-act-1963', 'The Andhra Pradesh and Madras (Alteration of Boundaries) Act, 1959': 'the-andhra-pradesh-and-madras-alteration-of-boundaries-act-1959', 'The Sikkim University Act, 2006': 'the-sikkim-university-act-2006', 'The Bonded Labour System (Abolition) Act, 1976': 'the-bonded-labour-system-abolition-act-1976', 'The Indian Institute of Petroleum and Energy Act': 'the-indian-institute-of-petroleum-and-energy-act', 'The Resettlement of Displaced Persons (Land Acquisition)\xa0\xa0Act, 1948': 'the-resettlement-of-displaced-persons-land-acquisition-act-1948', 'The Inter-State River Water Disputes Act, 1956': 'the-inter-state-river-water-disputes-act-1956', 'The Inchek Tyres Limited and National Rubber Manufacturers Limited (Nationalisation) Act, 1984': 'the-inchek-tyres-limited-and-national-rubber-manufacturers-limited-nationalisation-act-1984', 'The Commercial Courts, Commercial Division and Commercial Appellate Division of High Courts Act, 2015.': 'the-commercial-courts-commercial-division-and-commercial-appellate-division-of-high-courts-act-2015', 'The Haryana Law Officers (Engagement) Amendment Act, 2017': 'the-haryana-law-officers-engagement-amendment-act-2017', 'The Juvenile Justice (Care and Protection of Children) Act, 2015.': 'the-juvenile-justice-care-and-protection-of-children-act-2015', 'The National Capital Territory of Delhi Laws (Special Provisions) At, 2007': 'the-national-capital-territory-of-delhi-laws-special-provisions-at-2007', 'The Tyre Corporation of India Limited (Disinvestment of Ownership) Act, 2007': 'the-tyre-corporation-of-india-limited-disinvestment-of-ownership-act-2007', 'The Interest-tax Act, 1974': 'the-interest-tax-act-1974', 'The Light House Act, 1927': 'the-light-house-act-1927', 'The Voluntary Surrender of 
Salaries (Exemption from Taxation) Act, 1961': 'the-voluntary-surrender-of-salaries-exemption-from-taxation-act-1961', 'The Prisoners (Attendance in Courts) Act, 1955': 'the-prisoners-attendance-in-courts-act-1955', 'The Commissions of Inquiry Act, 1952': 'the-commissions-of-inquiry-act-1952', 'The National Security Act, 1980': 'the-national-security-act-1980', 'The Public Liability Insurance Act, 1991': 'the-public-liability-insurance-act-1991', "The Workmen's Compensation Act, 1923": 'the-workmen-s-compensation-act-1923', 'The Delhi Sikh Gurdwaras Act, 1971': 'the-delhi-sikh-gurdwaras-act-1971', 'The Explosive Substances Act, 1908': 'the-explosive-substances-act-1908', 'The Prisoners Act, 1900': 'the-prisoners-act-1900', 'The Punjab Municipal Corporation Law (Extension to Chandigarh) Act, 1994': 'the-punjab-municipal-corporation-law-extension-to-chandigarh-act-1994', 'The Court-Fees Act, 1870': 'the-court-fees-act-1870', 'The Legal Practitioners Act, 1879': 'the-legal-practitioners-act-1879', 'The Compulsory Deposit Scheme Act, 1963': 'the-compulsory-deposit-scheme-act-1963', 'The Dock Workers (Regulation of Employment) (Inapplicability to Major Ports) Act, 1997': 'the-dock-workers-regulation-of-employment-inapplicability-to-major-ports-act-1997', 'The Payment and Settlement Systems Act, 2007': 'the-payment-and-settlement-systems-act-2007', 'The Haryana Sports Council Act, 2016 (Haryana Act No.30 of 2016).': 'the-haryana-sports-council-act-2016-haryana-act-no-30-of-2016', 'The Criminal Law (Amendment) Act, 1932': 'the-criminal-law-amendment-act-1932', 'The National Institute of Fashion Technology Act, 2006': 'the-national-institute-of-fashion-technology-act-2006', 'The Land Ports Authority of India Act, 2010': 'the-land-ports-authority-of-india-act-2010', "The Bankers' Books Evidence Act, 1891": 'the-bankers-books-evidence-act-1891', 'The Central Industrial Security Force (Amendment and Validation) Act, 1999': 
'the-central-industrial-security-force-amendment-and-validation-act-1999', 'The Air Corporations (Amendment) \xa0Act, 1962': 'the-air-corporations-amendment-act-1962', 'The Goa, Daman and Diu Reorganisation Act, 1987': 'the-goa-daman-and-diu-reorganisation-act-1987', 'The Dock Workers (Regulation of Employment) Act, 1948': 'the-dock-workers-regulation-of-employment-act-1948', 'The Mussalman Wakf Act, 1923': 'the-mussalman-wakf-act-1923', 'The All-India Institutes of Medical Sciences\xa0Act, 1956': 'the-all-india-institutes-of-medical-sciences-act-1956', 'The Repealing and Amending Act': 'the-repealing-and-amending-act', 'The West Godavari District (Assimilation of Laws on Federal Subjects) Act, 1949': 'the-west-godavari-district-assimilation-of-laws-on-federal-subjects-act-1949', 'The National Waterway (Allahabad-Haldia Stretch of the Ganga-Bhagirathi-Hooghly River) Act, 1982': 'the-national-waterway-allahabad-haldia-stretch-of-the-ganga-bhagirathi-hooghly-river-act-1982', 'The Government of Union Territories (Amendment) Act, 1984': 'the-government-of-union-territories-amendment-act-1984', 'The Lokpal and Lokayuktas Act, 2013': 'the-lokpal-and-lokayuktas-act-2013', 'The Chandigarh (Delegation of Powers) Act, 1987': 'the-chandigarh-delegation-of-powers-act-1987', 'The Banking Regulation Act, 1949': 'the-banking-regulation-act-1949', 'The Central Road Fund Act, 2000': 'the-central-road-fund-act-2000', 'The Requisitioning and Acquisition of Immovable Property Act, 1952': 'the-requisitioning-and-acquisition-of-immovable-property-act-1952', 'The Indian Tramways Act, 1886': 'the-indian-tramways-act-1886', 'The Indian Criminal Law Amendment Act, 1908': 'the-indian-criminal-law-amendment-act-1908', 'The National Food Security Act, 2013': 'the-national-food-security-act-2013', 'The Visva-Bharati Act, 1951': 'the-visva-bharati-act-1951', 'The Indian Institutes of Information Technology Act, 2014': 'the-indian-institutes-of-information-technology-act-2014', 'The Protection of 
Women from Domestic Violence Act, 2005': 'the-protection-of-women-from-domestic-violence-act-2005', 'The Telecom Regulatory Authority of India Act, 1997': 'the-telecom-regulatory-authority-of-india-act-1997', 'The Armed Forces (Jammu and Kashmir) Special Powers Act, 1990': 'the-armed-forces-jammu-and-kashmir-special-powers-act-1990', 'The Bharat Petroleum Corporation Limited (Determination of Conditions of Service of Employees ) Act, 1988': 'the-bharat-petroleum-corporation-limited-determination-of-conditions-of-service-of-employees-act-1988', 'The Transfer of Prisoners Act, 1950': 'the-transfer-of-prisoners-act-1950', 'The Naval and Aircraft Prize Act, 1971': 'the-naval-and-aircraft-prize-act-1971', 'The Prevention of Blackmarketing and Maintenance of Supplies of Essential Commodities Act, 1980': 'the-prevention-of-blackmarketing-and-maintenance-of-supplies-of-essential-commodities-act-1980', 'The Emergency Risks (Goods) Insurance Act, 1971': 'the-emergency-risks-goods-insurance-act-1971', 'The Bird and Company Limited (Acquisition and Transfer of Undertakings and Other Properties) Act, 1980': 'the-bird-and-company-limited-acquisition-and-transfer-of-undertakings-and-other-properties-act-1980', 'The Probation of Offenders Act, 1958': 'the-probation-of-offenders-act-1958', 'The Petroleum Act, 1934': 'the-petroleum-act-1934', 'The Territorial Army Act, 1948': 'the-territorial-army-act-1948', 'The Taxation Laws (Amendment and Miscellaneous Provisions) Act, 1965': 'the-taxation-laws-amendment-and-miscellaneous-provisions-act-1965', 'The Coir Industry Act, 1953': 'the-coir-industry-act-1953', 'The Central Boards of Revenue\xa0Act, 1963': 'the-central-boards-of-revenue-act-1963', 'The Dadra and Nagar Haveli Act, 1961': 'the-dadra-and-nagar-haveli-act-1961', 'The State Emblem of India (Prohibition of Improper Use) Act, 2005': 'the-state-emblem-of-india-prohibition-of-improper-use-act-2005', 'The Working Journalists and other Newspaper Employees (Conditions of Service) 
and Miscellaneous Provisions Act, 1955': 'the-working-journalists-and-other-newspaper-employees-conditions-of-service-and-miscellaneous-provisions-act-1955', 'The Land Acquisition (Mines) Act, 1885': 'the-land-acquisition-mines-act-1885', 'The Haryana and Uttar Pradesh (Alteration of Boundaries) Act, 1979': 'the-haryana-and-uttar-pradesh-alteration-of-boundaries-act-1979', 'The Coinage Act, 2011': 'the-coinage-act-2011', 'The International Airports Authority (Amendment) Act, 1985': 'the-international-airports-authority-amendment-act-1985', 'The Medicinal and Toilet Preparations (Excise Duties) Act, 1955': 'the-medicinal-and-toilet-preparations-excise-duties-act-1955', 'The Asian Development Bank Act, 1966': 'the-asian-development-bank-act-1966', 'The Indian Carriage of Goods by Sea Act, 1925': 'the-indian-carriage-of-goods-by-sea-act-1925', 'The Extradition Act, 1962': 'the-extradition-act-1962', 'The Minimum Wages Act, 1948': 'the-minimum-wages-act-1948', 'The Haryana Backward Classes (Reservation In Services and Admission In Educational Institutions) Act, 2016': 'the-haryana-backward-classes-reservation-in-services-and-admission-in-educational-institutions-act-201', 'The Trade Unions Act, 1926': 'the-trade-unions-act-1926', 'The Union Territories (Laws) Act, 1950': 'the-union-territories-laws-act-1950', 'The Immigrants (Expulsion from Assam) Act, 1950': 'the-immigrants-expulsion-from-assam-act-1950', 'The Insurance Regulatory and Development Authority Act, 1999': 'the-insurance-regulatory-and-development-authority-act-1999', 'The Finance Commission (Miscellaneous Provisions) Act, 1951': 'the-finance-commission-miscellaneous-provisions-act-1951', 'The Consumer Protection Act, 1986': 'the-consumer-protection-act-1986', 'The Shri Krishna Ayush University Kurukshetra Act, 2016 (Haryana Act No. 
25 of 2017)': 'the-shri-krishna-ayush-university-kurukshetra-act-2016-haryana-act-no-25-of-2017', 'The National Highways Act, 1956': 'the-national-highways-act-1956', 'The Judges (Protection) Act, 1985': 'the-judges-protection-act-1985', 'The National Commission for Minority Educational Institutes Act, 2004': 'the-national-commission-for-minority-educational-institutes-act-2004', 'The School of Planning and Architecture Act, 2014': 'the-school-of-planning-and-architecture-act-2014', 'The Government of Union Territories Act, 1963': 'the-government-of-union-territories-act-1963-20-of-1963', 'The Acquisition of Certain Area at Ayodhya Act, 1993': 'the-acquisition-of-certain-area-at-ayodhya-act-1993', 'The Andhra Pradesh Legislative Council Act, 2005': 'the-andhra-pradesh-legislative-council-act-2005', 'The State Bank of India (Subsidiary Banks) Act, 1959': 'the-state-bank-of-india-subsidiary-banks-act-1959', 'The Chaparmukh-Silghat Railway Line and the Katakhal-Lalabazar Railway Line (Nationalisation) Act, 1982': 'the-chaparmukh-silghat-railway-line-and-the-katakhal-lalabazar-railway-line-nationalisation-act-1982', 'The Epidemic Diseases Act, 1897': 'the-epidemic-diseases-act-1897', 'The Special Economic Zones Act, 2005': 'the-special-economic-zones-act-2005', 'The Special Court (Trial of Offences Relating to Transactions in Securities) Act, 1992': 'the-special-court-trial-of-offences-relating-to-transactions-in-securities-act-1992', 'The National Capital Territory of Delhi Laws (Special Provisions) Second Act, 2009': 'the-national-capital-territory-of-delhi-laws-special-provisions-second-act-2009', 'The Recovery of Debts Due to Banks and Financial Institutions Act, 1993': 'the-the-recovery-of-debts-due-to-banks-and-financial-institutions-act-1993', 'The Rajghat Samadhi Act, 1951': 'the-rajghat-samadhi-act-1951', 'The Insecticides Act, 1968': 'the-insecticides-act-1968', 'The Delhi Police Act, 1978': 'the-delhi-police-act-1978', 'The Prevention of Money-Laundering 
Act, 2002': 'the-prevention-of-money-laundering-act-2002', 'The Handlooms (Reservation of Articles for Production) Act, 1985': 'the-handlooms-reservation-of-articles-for-production-act-1985', 'The Bihar and Uttar Pradesh (Alteration of Boundaries) Act, 1968': 'the-bihar-and-uttar-pradesh-alteration-of-boundaries-act-1968', "The Unorganised Workers' Social Security Act, 2008": 'the-unorganised-workers-social-security-act-2008', 'The Suits Valuation Act, 1887': 'the-suits-valuation-act-1887', 'The Cine-workers Welfare Fund Act, 1981': 'the-cine-workers-welfare-fund-act-1981', 'The Wild Life (Protection) Act, 1972': 'the-wildlife-protection-act-1972', 'The Sports Broadcasting Signals (Mandatory Sharing with PrasarBharati) Act, 2007': 'the-sports-broadcasting-signals-mandatory-sharing-with-prasarbharati-act-2007', 'The Manoeuvres, Field Firing and Artillery Practice Act, 1938': 'the-manoeuvres-field-firing-and-artillery-practice-act-1938', 'The Mines Act, 1952': 'the-mines-act-1952', 'The Manipur (Village Authorities in Hill Areas) Act, 1956': 'the-manipur-village-authorities-in-hill-areas-act-1956', 'The Religious Institutions (Prevention of Misuse) Act, 1988': 'the-religious-institutions-prevention-of-misuse-act-1988', 'The Prohibition of Benami Property Transactions Act, 1988': 'the-prohibition-of-benami-property-transactions-act-1988', 'The Charitable and Religious Trusts Act, 1920': 'the-charitable-and-religious-trusts-act-1920', 'The The Continuance of Legal Proceedings Act, 1948': 'the-the-continuance-of-legal-proceedings-act-1948', 'The National Commission for Women Act, 1990': 'the-national-commission-for-women-act-1990', 'The Foreign Contribution (Regulation) Act, 2010': 'the-foreign-contribution-regulation-act-2010', 'The Dr. 
Rajendra Prasad Central Agricultural University Act, 2016.': 'the-dr-rajendra-prasad-central-agricultural-university-act-2016', 'The National Cadet Corps Act, 1948': 'the-national-cadet-corps-act-1948', 'The Science and Engineering Research Board Act, 2008': 'the-science-and-engineering-research-board-act-2008', 'The Indian Forest Act, 1927': 'the-indian-forest-act', 'The Salaries and Allowances of Officers of Parliament Act, 1953': 'the-salaries-and-allowances-of-officers-of-parliament-act-1953', 'The Building and Other Construction Workers (Regulation of Employment and Conditions of Service) Act, 1996': 'the-building-and-other-construction-workers-regulation-of-employment-and-conditions-of-service-act-1996', 'The Academy of Scientific and Innovative Research Act, 2011': 'the-academy-of-scientific-and-innovative-research-act-2011', 'The Andhra Pradesh Reorganisation Act, 2014': 'the-andhra-pradesh-reorganisation-act-2014', 'The Forward Contracts (Regulation) Act, 1952': 'the-forward-contracts-regulation-act-1952', 'The Suppression of Unlawful Acts Against Safety of Maritime Navigation and Fixed Platforms on Continental Shelf Act, 2002': 'the-suppression-of-unlawful-acts-against-safety-of-maritime-navigation-and-fixed-platforms-on-continental-shelf-act', 'The Inflammable Substances Act, 1952': 'the-inflammable-substances-act-1952', 'The Legal Tender (Inscribed Notes) Act, 1964': 'the-legal-tender-inscribed-notes-act-1964', 'The Anti-Hijacking Act, 1982': 'the-anti-hijacking-act-1982', 'The National Tax Tribunal Act, 2005': 'the-national-tax-tribunal-act-2005', 'The Khadi and Village Industries Commission Act, 1956': 'the-khadi-and-village-industries-commission-act-1956', 'The Cantonments (Extension of Rent Control Laws) Act, 1957': 'the-cantonments-extension-of-rent-control-laws-act-1957', 'The Pharmacy Act, 1948': 'the-pharmacy-act-1948', 'The Coal Mines (Conservation and Development) Act, 1974': 'the-coal-mines-conservation-and-development-act-1974', 'The Mines 
(Amendment) Act, 1983': 'the-mines-amendment-act-1983', 'The Central Laws (Extension to Arunachal Pradesh) Act, 1993': 'the-central-laws-extension-to-arunachal-pradesh-act-1993', 'The Rajiv Gandhi National Aviation University Act, 2013': 'the-rajiv-gandhi-national-aviation-university-act-2013', 'The Real Estate (Regulation and Development) Act, 2016.': 'the-real-estate-regulation-and-development-act-2016', 'The General Clauses Act, 1897': 'the-general-clauses-act-1897', 'The Andhra Scientific Company Limited (Acquisition and Transfer of Undertakings) Act, 1982': 'the-andhra-scientific-company-limited-acquisition-and-transfer-of-undertakings-act-1982', 'The Cine-workers and Cinema Theatre Workers (Regulation of Employment) Act, 1981': 'the-cine-workers-and-cinema-theatre-workers-regulation-of-employment-act-1981', 'The Manipur (Sales of Motor Spirit and Lubricants) Taxation Act, 1962': 'the-manipur-sales-of-motor-spirit-and-lubricants-taxation-act-1962', 'The Muslim Personal Law (Shariat) Application Act, 1937': 'the-muslim-personal-law-shariat-application-act-1937', 'The States Reorganiztion Act, 1956 (37 of 1956)': 'the-states-reorganisation-act-1956', 'The Motor Vehicles Act, 1988': 'the-motor-vehicles-act-1988', 'The Cause 1 of Article 240': 'the-cause-1-of-article-240', 'The Post Office Cash Certificates Act, 1917': 'the-post-office-cash-certificates-act-1917', 'The Stage-Carriages Act, 1861': 'the-stage-carriages-act-1861', 'The Jute Packaging Materials (Compulsory Use in Packing Commodities) Act, 1987': 'the-jute-packaging-materials-compulsory-use-in-packing-commodities-act-1987', 'The Election Commission (Conditions of Service of Election Commissioners and Transaction of Business) Act, 1991': 'the-election-commission-conditions-of-service-of-election-commissioners-and-transaction-of-business-act-1991', 'The Police Act, 1949': 'the-police-act-1949', 'The Hindu Gains of Learning Act, 1930': 'the-hindu-gains-of-learning-act-1930', 'The Legal Metrology Act, 
2009': 'the-legal-metrology-act-2009', 'The Goods and Services Tax (Compensation to States) Act, 2017': 'the-goods-and-services-tax-compensation-to-states-act-2017', 'The Child and Adolescent Labour (Prohibition and Regulation) Act, 1986': 'the-child-and-adolescent-labour-prohibition-and-regulation-act-1986', 'The International Finance Corporation (Status, Immunities and Privileges) Act, 1958': 'the-international-finance-corporation-status-immunities-and-privileges-act-1958', 'The Carriage by Road Act, 2007': 'the-carriage-by-road-act-2007', 'The Salaries and Allowances of Ministers Act, 1952': 'the-salaries-and-allowances-of-ministers-act-1952', 'The Airports Economic Regulatory Authority of India Act, 2008': 'the-airports-economic-regulatory-authority-of-india-act-2008', 'The Indian Reserve Forces Act, 1888': 'the-indian-reserve-forces-act-1888', 'The Reserve Bank of India Act, 1934': 'the-reserve-bank-of-india-act-1934', 'The High Court Judges Salaries and Conditions of Service Act, 1954': 'the-high-court-judges-salaries-and-conditions-of-service-act-1954', 'The Mental Healthcare Act, 2017': 'the-mental-healthcare-act-2017', 'The Land Improvement Loans Act, 1883': 'the-land-improvement-loans-act-1883', 'The National Investigation Agency Act, 2008': 'the-national-investigation-agency-act-2008', 'The PUNJAB AYURVEDIC AND UNANI PRACTITIONERS': 'the-punjab-ayurvedic-and-unani-practitioners', 'The Notaries Act, 1952': 'the-notaries-act-1952', 'The National Trust for Welfare of Persons with Autism, Cerebral Palsy, Mental Retardation and Multiple Disabilities Act, 1999': 'the-national-trust-for-welfare-of-persons-with-autism-cerebral-palsy-mental-retardation-and-multiple-disabilities-a', 'The Hooghly Docking and Engineering Company Limited (Acquisition and Transfer of Undertakings) Act, 1984': 'the-hooghly-docking-and-engineering-company-limited-acquisition-and-transfer-of-undertakings-act-1984', 'The Arya Marriage Validation Act, 1937': 
'the-arya-marriage-validation-act-1937', 'The Babasaheb Bhimrao Ambedkar University Act, 1994': 'the-babasaheb-bhimrao-ambedkar-university-act-1994', 'The Leaders and Chief Whips of Recognised Parties and Groups in Parliament (Facilities) Act, 1998': 'the-leaders-and-chief-whips-of-recognised-parties-and-groups-in-parliament-facilities-act-1998', 'The Antiquities and Art Treasures Act, 1972': 'the-antiquities-and-art-treasures-act-1972', 'The Anti-Hijacking Act, 2016.': 'the-anti-hijacking-act-2016', 'The Whistle Blowers Protection Act, 2014': 'the-whistle-blowers-protection-act-2014', 'The Water (Prevention and Control of Pollution) Cess Act, 1977': 'the-water-prevention-and-control-of-pollution-cess-act-1977', 'The University of Hyderabad\xa0Act, 1974': 'the-university-of-hyderabad-act-1974', 'The State Bank of Hyderabad Act, 1956': 'the-state-bank-of-hyderabad-act-1956', 'The Sugar Development Fund Act, 1982': 'the-sugar-development-fund-act-1982', 'The Border Security Force Act, 1968': 'the-border-security-force-act-1968', 'The Infant Milk Substitutes, Feeding Bottles and Infant Foods (Regulation of Production, Supply and Distribution) Act, 1992': 'the-infant-milk-substitutes-feeding-bottles-and-infant-foods-regulation-of-production-supply-and-distribution-act', 'The Tamil Nadu Legislative Council Act, 2010': 'the-tamil-nadu-legislative-council-act-2010', 'The Micro, Small and Medium Enterprises Development Act, 2006': 'the-micro-small-and-medium-enterprises-development-act-2006', 'The Industrial Development Bank (Transfer of Undertaking and Repeal) Act, 2003': 'the-industrial-development-bank-transfer-of-undertaking-and-repeal-act-2003', 'The Prasar Bharati (Broadcasting Corporation of India) Act, 1990': 'the-prasar-bharati-broadcasting-corporation-of-india-act-1990', 'The Registration of Foreigners Act, 1939': 'the-registration-of-foreigners-act-1939',
'Cr.':'the-code-of-civil-procedure-act-1908',
'Cr.P.C.':'the-code-of-civil-procedure-act-1908',
'IPC':'the-indian-penal-code-1860',
'Code':'the-code-of-civil-procedure-act-1908'}
| 19,291.8
| 96,280
| 0.781037
| 13,970
| 96,459
| 5.39277
| 0.103364
| 0.010393
| 0.005708
| 0.008601
| 0.983607
| 0.952281
| 0.852795
| 0.707421
| 0.575096
| 0.452805
| 0
| 0.083363
| 0.080853
| 96,459
| 5
| 96,281
| 19,291.8
| 0.766366
| 0
| 0
| 0
| 0
| 28.4
| 0.924145
| 0.450446
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.2
| 0.2
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 11
|
128751db3dd68f6312a11c3f75d0bae8f657edcc
| 1,950
|
py
|
Python
|
rgkit/maps/afffsdd/hourglass.py
|
outkine/rgkit
|
eb5d80c0d1815cc016bf7c584310120991760cc8
|
[
"Unlicense"
] | 1
|
2021-11-04T22:19:59.000Z
|
2021-11-04T22:19:59.000Z
|
rgkit/maps/afffsdd/hourglass.py
|
outkine/rgkit
|
eb5d80c0d1815cc016bf7c584310120991760cc8
|
[
"Unlicense"
] | null | null | null |
rgkit/maps/afffsdd/hourglass.py
|
outkine/rgkit
|
eb5d80c0d1815cc016bf7c584310120991760cc8
|
[
"Unlicense"
] | 2
|
2021-02-16T09:37:47.000Z
|
2021-11-04T22:30:51.000Z
|
# Map by afffsdd
# An hourglass-shaped map with a chokepoint at the center.
# The dict below is the whole map definition on a 19x19 grid
# (coordinates run 0..18 on both axes):
#   - 'spawn': spawn points on the two horizontal rows y=1 and y=17
#     (odd x positions only), one row per side of the hourglass.
#   - 'obstacle': the full border plus two facing triangles that narrow
#     the playable area toward the middle row, leaving only a small gap
#     around (9, 9) — the central chokepoint.
# Presumably this file is read and evaluated as a literal by the rgkit
# map loader, so the dict must remain a plain literal — TODO confirm.
# flake8: noqa
# TODO: Format this file.
{'spawn': [(1, 1), (3, 1), (5, 1), (7, 1), (9, 1), (11, 1), (13, 1), (15, 1), (17, 1), (1, 17), (3, 17), (5, 17), (7, 17), (9, 17), (11, 17), (13, 17), (15, 17), (17, 17)], 'obstacle': [(0, 0), (1, 0), (2, 0), (3, 0), (4, 0), (5, 0), (6, 0), (7, 0), (8, 0), (9, 0), (10, 0), (11, 0), (12, 0), (13, 0), (14, 0), (15, 0), (16, 0), (17, 0), (18, 0), (0, 1), (18, 1), (0, 2), (18, 2), (0, 3), (18, 3), (0, 4), (1, 4), (2, 4), (16, 4), (17, 4), (18, 4), (0, 5), (1, 5), (2, 5), (3, 5), (4, 5), (14, 5), (15, 5), (16, 5), (17, 5), (18, 5), (0, 6), (1, 6), (2, 6), (3, 6), (4, 6), (5, 6), (13, 6), (14, 6), (15, 6), (16, 6), (17, 6), (18, 6), (0, 7), (1, 7), (2, 7), (3, 7), (4, 7), (5, 7), (6, 7), (12, 7), (13, 7), (14, 7), (15, 7), (16, 7), (17, 7), (18, 7), (0, 8), (1, 8), (2, 8), (3, 8), (4, 8), (5, 8), (6, 8), (7, 8), (11, 8), (12, 8), (13, 8), (14, 8), (15, 8), (16, 8), (17, 8), (18, 8), (0, 9), (1, 9), (2, 9), (3, 9), (4, 9), (5, 9), (6, 9), (7, 9), (8, 9), (10, 9), (11, 9), (12, 9), (13, 9), (14, 9), (15, 9), (16, 9), (17, 9), (18, 9), (0, 10), (1, 10), (2, 10), (3, 10), (4, 10), (5, 10), (6, 10), (7, 10), (11, 10), (12, 10), (13, 10), (14, 10), (15, 10), (16, 10), (17, 10), (18, 10), (0, 11), (1, 11), (2, 11), (3, 11), (4, 11), (5, 11), (6, 11), (12, 11), (13, 11), (14, 11), (15, 11), (16, 11), (17, 11), (18, 11), (0, 12), (1, 12), (2, 12), (3, 12), (4, 12), (5, 12), (13, 12), (14, 12), (15, 12), (16, 12), (17, 12), (18, 12), (0, 13), (1, 13), (2, 13), (3, 13), (4, 13), (14, 13), (15, 13), (16, 13), (17, 13), (18, 13), (0, 14), (1, 14), (2, 14), (16, 14), (17, 14), (18, 14), (0, 15), (18, 15), (0, 16), (18, 16), (0, 17), (18, 17), (0, 18), (1, 18), (2, 18), (3, 18), (4, 18), (5, 18), (6, 18), (7, 18), (8, 18), (9, 18), (10, 18), (11, 18), (12, 18), (13, 18), (14, 18), (15, 18), (16, 18), (17, 18), (18, 18)]}
| 325
| 1,832
| 0.355385
| 425
| 1,950
| 1.630588
| 0.089412
| 0.005772
| 0.008658
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.391847
| 0.22
| 1,950
| 5
| 1,833
| 390
| 0.063774
| 0.055385
| 0
| 0
| 0
| 0
| 0.007077
| 0
| 0
| 0
| 0
| 0.2
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
129bdf559e804ceefb17abc73d8257d4baac265e
| 1,517
|
py
|
Python
|
test/rplugin/python3/deoplete/test_matcher_full_fuzzy.py
|
kazufusa/deoplete.nvim
|
e61b8faee0b07f837f9009d780cbc3c2caaef76a
|
[
"MIT"
] | 10
|
2020-07-21T21:59:54.000Z
|
2021-07-19T11:01:47.000Z
|
test/rplugin/python3/deoplete/test_matcher_full_fuzzy.py
|
kazufusa/deoplete.nvim
|
e61b8faee0b07f837f9009d780cbc3c2caaef76a
|
[
"MIT"
] | 4
|
2017-04-15T17:45:36.000Z
|
2017-11-20T16:27:17.000Z
|
test/rplugin/python3/deoplete/test_matcher_full_fuzzy.py
|
kazufusa/deoplete.nvim
|
e61b8faee0b07f837f9009d780cbc3c2caaef76a
|
[
"MIT"
] | 1
|
2021-01-30T18:17:01.000Z
|
2021-01-30T18:17:01.000Z
|
from deoplete.filter.matcher_full_fuzzy import Filter
from test_matcher_fuzzy import _ctx
def test_matcher_full_fuzzy():
    """Exercise the full-fuzzy matcher: metadata plus filtering under each
    combination of the ``ignorecase`` / ``camelcase`` context flags."""
    matcher = Filter(None)
    assert matcher.name == 'matcher_full_fuzzy'
    assert matcher.description == 'full fuzzy matcher'

    # Candidate words in the order the matcher is expected to keep them.
    every_word = ['foobar', 'afoobar', 'fooBar', 'afooBar',
                  'Foobar', 'aFoobar', 'FooBar', 'aFooBar']
    camel_only = ['fooBar', 'afooBar', 'FooBar', 'aFooBar']
    lower_only = ['foobar', 'afoobar']

    def expected(words):
        # Candidates come back as dicts keyed by 'word'.
        return [{'word': w} for w in words]

    # An empty query matches every candidate.
    assert matcher.filter(_ctx('')) == expected(every_word)

    # Default flags: an uppercase query still matches everything.
    assert matcher.filter(_ctx('FOBR')) == expected(every_word)

    # Case-sensitive: only candidates whose casing fits the query survive.
    assert matcher.filter(_ctx('foBr', ignorecase=False)) == expected(camel_only)

    # camelcase off but ignorecase still on: everything matches.
    assert matcher.filter(_ctx('fobr', camelcase=False)) == expected(every_word)

    # Both flags off: strict lowercase subsequence matching only.
    assert matcher.filter(
        _ctx('fobr', ignorecase=False, camelcase=False)
    ) == expected(lower_only)
| 25.283333
| 57
| 0.45089
| 136
| 1,517
| 4.919118
| 0.161765
| 0.224215
| 0.313901
| 0.470852
| 0.736921
| 0.736921
| 0.736921
| 0.736921
| 0.736921
| 0.736921
| 0
| 0
| 0.334212
| 1,517
| 59
| 58
| 25.711864
| 0.662376
| 0
| 0
| 0.686275
| 0
| 0
| 0.241925
| 0
| 0
| 0
| 0
| 0
| 0.137255
| 1
| 0.019608
| false
| 0
| 0.039216
| 0
| 0.058824
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
12ae0d207d48a2d5cd27a73a98a36bf4de1383f4
| 999,997
|
py
|
Python
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_manageability_perfmgmt_cfg.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 177
|
2016-03-15T17:03:51.000Z
|
2022-03-18T16:48:44.000Z
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_manageability_perfmgmt_cfg.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 18
|
2016-03-30T10:45:22.000Z
|
2020-07-14T16:28:13.000Z
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_manageability_perfmgmt_cfg.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 85
|
2016-03-16T20:38:57.000Z
|
2022-02-22T04:26:02.000Z
|
""" Cisco_IOS_XR_manageability_perfmgmt_cfg
This module contains a collection of YANG definitions
for Cisco IOS\-XR manageability\-perfmgmt package configuration.
This module contains definitions
for the following management objects\:
perf\-mgmt\: Performance Management configuration & operations
Copyright (c) 2013\-2018 by Cisco Systems, Inc.
All rights reserved.
"""
import sys
from collections import OrderedDict
from ydk.types import Entity as _Entity_
from ydk.types import EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class PmThresholdOp(Enum):
    """
    PmThresholdOp (Enum Class)

    Pm threshold op

    Comparison operator applied when a performance-management threshold
    condition is evaluated against a sampled value.

    .. data:: eq = 1

        Equal to

    .. data:: ne = 2

        Not equal to

    .. data:: lt = 3

        Less than

    .. data:: le = 4

        Less than or equal to

    .. data:: gt = 5

        Greater than

    .. data:: ge = 6

        Greater than or equal to

    .. data:: rg = 7

        Not in Range
    """

    # Numeric values mirror the YANG enum definition this binding was
    # generated from.
    eq = Enum.YLeaf(1, "eq")

    ne = Enum.YLeaf(2, "ne")

    lt = Enum.YLeaf(3, "lt")

    le = Enum.YLeaf(4, "le")

    gt = Enum.YLeaf(5, "gt")

    ge = Enum.YLeaf(6, "ge")

    rg = Enum.YLeaf(7, "rg")

    @staticmethod
    def _meta_info():
        # Lazy import — presumably defers loading the large generated
        # _meta module until metadata is actually requested (TODO confirm).
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PmThresholdOp']
class PmThresholdRearm(Enum):
    """
    PmThresholdRearm (Enum Class)

    Pm threshold rearm

    Policy that controls when a tripped performance-management threshold
    is re-armed and may fire again.

    .. data:: always = 0

        Rearm Always

    .. data:: window = 1

        Rearm after window of sampling periods

    .. data:: toggle = 2

        Rearm after the first period when condition is
        not met
    """

    # Numeric values mirror the YANG enum definition this binding was
    # generated from.
    always = Enum.YLeaf(0, "always")

    window = Enum.YLeaf(1, "window")

    toggle = Enum.YLeaf(2, "toggle")

    @staticmethod
    def _meta_info():
        # Lazy import — presumably defers loading the large generated
        # _meta module until metadata is actually requested (TODO confirm).
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PmThresholdRearm']
class PerfMgmt(_Entity_):
"""
Performance Management configuration & operations
.. attribute:: resources
Resources configuration
**type**\: :py:class:`Resources <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Resources>`
.. attribute:: statistics
Templates for collection of statistics
**type**\: :py:class:`Statistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Statistics>`
.. attribute:: enable
Start data collection and/or threshold monitoring
**type**\: :py:class:`Enable <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable>`
.. attribute:: reg_exp_groups
Configure regular expression group
**type**\: :py:class:`RegExpGroups <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.RegExpGroups>`
.. attribute:: threshold
Container for threshold templates
**type**\: :py:class:`Threshold <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PerfMgmt, self).__init__()
self._top_entity = None
self.yang_name = "perf-mgmt"
self.yang_parent_name = "Cisco-IOS-XR-manageability-perfmgmt-cfg"
self.is_top_level_class = True
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("resources", ("resources", PerfMgmt.Resources)), ("statistics", ("statistics", PerfMgmt.Statistics)), ("enable", ("enable", PerfMgmt.Enable)), ("reg-exp-groups", ("reg_exp_groups", PerfMgmt.RegExpGroups)), ("threshold", ("threshold", PerfMgmt.Threshold))])
self._leafs = OrderedDict()
self.resources = PerfMgmt.Resources()
self.resources.parent = self
self._children_name_map["resources"] = "resources"
self.statistics = PerfMgmt.Statistics()
self.statistics.parent = self
self._children_name_map["statistics"] = "statistics"
self.enable = PerfMgmt.Enable()
self.enable.parent = self
self._children_name_map["enable"] = "enable"
self.reg_exp_groups = PerfMgmt.RegExpGroups()
self.reg_exp_groups.parent = self
self._children_name_map["reg_exp_groups"] = "reg-exp-groups"
self.threshold = PerfMgmt.Threshold()
self.threshold.parent = self
self._children_name_map["threshold"] = "threshold"
self._segment_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PerfMgmt, [], name, value)
class Resources(_Entity_):
"""
Resources configuration
.. attribute:: tftp_resources
Configure the TFTP server IP address and directory name
**type**\: :py:class:`TftpResources <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Resources.TftpResources>`
**presence node**\: True
.. attribute:: dump_local
Configure local dump parameters
**type**\: :py:class:`DumpLocal <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Resources.DumpLocal>`
.. attribute:: memory_resources
Configure the memory usage limits of performance management
**type**\: :py:class:`MemoryResources <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Resources.MemoryResources>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PerfMgmt.Resources, self).__init__()
self.yang_name = "resources"
self.yang_parent_name = "perf-mgmt"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("tftp-resources", ("tftp_resources", PerfMgmt.Resources.TftpResources)), ("dump-local", ("dump_local", PerfMgmt.Resources.DumpLocal)), ("memory-resources", ("memory_resources", PerfMgmt.Resources.MemoryResources))])
self._leafs = OrderedDict()
self.tftp_resources = None
self._children_name_map["tftp_resources"] = "tftp-resources"
self.dump_local = PerfMgmt.Resources.DumpLocal()
self.dump_local.parent = self
self._children_name_map["dump_local"] = "dump-local"
self.memory_resources = PerfMgmt.Resources.MemoryResources()
self.memory_resources.parent = self
self._children_name_map["memory_resources"] = "memory-resources"
self._segment_path = lambda: "resources"
self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PerfMgmt.Resources, [], name, value)
class TftpResources(_Entity_):
"""
Configure the TFTP server IP address and
directory name
.. attribute:: server_address
IP address of the TFTP server
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**mandatory**\: True
.. attribute:: directory
Directory name on TFTP server
**type**\: str
**mandatory**\: True
.. attribute:: vrf_name
VRF name
**type**\: str
**length:** 1..32
This class is a :ref:`presence class<presence-class>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PerfMgmt.Resources.TftpResources, self).__init__()
self.yang_name = "tftp-resources"
self.yang_parent_name = "resources"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self.is_presence_container = True
self._leafs = OrderedDict([
('server_address', (YLeaf(YType.str, 'server-address'), ['str'])),
('directory', (YLeaf(YType.str, 'directory'), ['str'])),
('vrf_name', (YLeaf(YType.str, 'vrf-name'), ['str'])),
])
self.server_address = None
self.directory = None
self.vrf_name = None
self._segment_path = lambda: "tftp-resources"
self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/resources/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PerfMgmt.Resources.TftpResources, ['server_address', 'directory', 'vrf_name'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
return meta._meta_table['PerfMgmt.Resources.TftpResources']['meta_info']
class DumpLocal(_Entity_):
"""
Configure local dump parameters
.. attribute:: enable
Enable data dump onto local filesystem
**type**\: :py:class:`Empty<ydk.types.Empty>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PerfMgmt.Resources.DumpLocal, self).__init__()
self.yang_name = "dump-local"
self.yang_parent_name = "resources"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('enable', (YLeaf(YType.empty, 'enable'), ['Empty'])),
])
self.enable = None
self._segment_path = lambda: "dump-local"
self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/resources/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PerfMgmt.Resources.DumpLocal, ['enable'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
return meta._meta_table['PerfMgmt.Resources.DumpLocal']['meta_info']
class MemoryResources(_Entity_):
"""
Configure the memory usage limits of
performance management
.. attribute:: max_limit
Maximum limit for memory usage (Kbytes) for data buffers
**type**\: int
**range:** 0..4294967295
**units**\: kilobyte
.. attribute:: min_reserved
Specify a minimum free memory (Kbytes) to be ensured before allowing a collection request
**type**\: int
**range:** 0..4294967295
**units**\: kilobyte
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PerfMgmt.Resources.MemoryResources, self).__init__()
self.yang_name = "memory-resources"
self.yang_parent_name = "resources"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('max_limit', (YLeaf(YType.uint32, 'max-limit'), ['int'])),
('min_reserved', (YLeaf(YType.uint32, 'min-reserved'), ['int'])),
])
self.max_limit = None
self.min_reserved = None
self._segment_path = lambda: "memory-resources"
self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/resources/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PerfMgmt.Resources.MemoryResources, ['max_limit', 'min_reserved'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
return meta._meta_table['PerfMgmt.Resources.MemoryResources']['meta_info']
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
return meta._meta_table['PerfMgmt.Resources']['meta_info']
class Statistics(_Entity_):
"""
Templates for collection of statistics
.. attribute:: generic_counter_interface
Interface Generic GenericCounter collection templates
**type**\: :py:class:`GenericCounterInterface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Statistics.GenericCounterInterface>`
.. attribute:: process_node
Node Process collection templates
**type**\: :py:class:`ProcessNode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Statistics.ProcessNode>`
.. attribute:: basic_counter_interface
Interface BasicCounter collection templates
**type**\: :py:class:`BasicCounterInterface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Statistics.BasicCounterInterface>`
.. attribute:: ospfv3_protocol
OSPF v3 Protocol collection templates
**type**\: :py:class:`Ospfv3Protocol <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Statistics.Ospfv3Protocol>`
.. attribute:: cpu_node
Node CPU collection templates
**type**\: :py:class:`CpuNode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Statistics.CpuNode>`
.. attribute:: data_rate_interface
Interface DataRate collection templates
**type**\: :py:class:`DataRateInterface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Statistics.DataRateInterface>`
.. attribute:: memory_node
Node Memory collection templates
**type**\: :py:class:`MemoryNode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Statistics.MemoryNode>`
.. attribute:: ldp_mpls
MPLS LDP collection templates
**type**\: :py:class:`LdpMpls <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Statistics.LdpMpls>`
.. attribute:: bgp
BGP collection templates
**type**\: :py:class:`Bgp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Statistics.Bgp>`
.. attribute:: ospfv2_protocol
OSPF v2 Protocol collection templates
**type**\: :py:class:`Ospfv2Protocol <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Statistics.Ospfv2Protocol>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PerfMgmt.Statistics, self).__init__()
self.yang_name = "statistics"
self.yang_parent_name = "perf-mgmt"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("generic-counter-interface", ("generic_counter_interface", PerfMgmt.Statistics.GenericCounterInterface)), ("process-node", ("process_node", PerfMgmt.Statistics.ProcessNode)), ("basic-counter-interface", ("basic_counter_interface", PerfMgmt.Statistics.BasicCounterInterface)), ("ospfv3-protocol", ("ospfv3_protocol", PerfMgmt.Statistics.Ospfv3Protocol)), ("cpu-node", ("cpu_node", PerfMgmt.Statistics.CpuNode)), ("data-rate-interface", ("data_rate_interface", PerfMgmt.Statistics.DataRateInterface)), ("memory-node", ("memory_node", PerfMgmt.Statistics.MemoryNode)), ("ldp-mpls", ("ldp_mpls", PerfMgmt.Statistics.LdpMpls)), ("bgp", ("bgp", PerfMgmt.Statistics.Bgp)), ("ospfv2-protocol", ("ospfv2_protocol", PerfMgmt.Statistics.Ospfv2Protocol))])
self._leafs = OrderedDict()
self.generic_counter_interface = PerfMgmt.Statistics.GenericCounterInterface()
self.generic_counter_interface.parent = self
self._children_name_map["generic_counter_interface"] = "generic-counter-interface"
self.process_node = PerfMgmt.Statistics.ProcessNode()
self.process_node.parent = self
self._children_name_map["process_node"] = "process-node"
self.basic_counter_interface = PerfMgmt.Statistics.BasicCounterInterface()
self.basic_counter_interface.parent = self
self._children_name_map["basic_counter_interface"] = "basic-counter-interface"
self.ospfv3_protocol = PerfMgmt.Statistics.Ospfv3Protocol()
self.ospfv3_protocol.parent = self
self._children_name_map["ospfv3_protocol"] = "ospfv3-protocol"
self.cpu_node = PerfMgmt.Statistics.CpuNode()
self.cpu_node.parent = self
self._children_name_map["cpu_node"] = "cpu-node"
self.data_rate_interface = PerfMgmt.Statistics.DataRateInterface()
self.data_rate_interface.parent = self
self._children_name_map["data_rate_interface"] = "data-rate-interface"
self.memory_node = PerfMgmt.Statistics.MemoryNode()
self.memory_node.parent = self
self._children_name_map["memory_node"] = "memory-node"
self.ldp_mpls = PerfMgmt.Statistics.LdpMpls()
self.ldp_mpls.parent = self
self._children_name_map["ldp_mpls"] = "ldp-mpls"
self.bgp = PerfMgmt.Statistics.Bgp()
self.bgp.parent = self
self._children_name_map["bgp"] = "bgp"
self.ospfv2_protocol = PerfMgmt.Statistics.Ospfv2Protocol()
self.ospfv2_protocol.parent = self
self._children_name_map["ospfv2_protocol"] = "ospfv2-protocol"
self._segment_path = lambda: "statistics"
self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PerfMgmt.Statistics, [], name, value)
class GenericCounterInterface(_Entity_):
"""
Interface Generic GenericCounter collection
templates
.. attribute:: templates
Template name
**type**\: :py:class:`Templates <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Statistics.GenericCounterInterface.Templates>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PerfMgmt.Statistics.GenericCounterInterface, self).__init__()
self.yang_name = "generic-counter-interface"
self.yang_parent_name = "statistics"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("templates", ("templates", PerfMgmt.Statistics.GenericCounterInterface.Templates))])
self._leafs = OrderedDict()
self.templates = PerfMgmt.Statistics.GenericCounterInterface.Templates()
self.templates.parent = self
self._children_name_map["templates"] = "templates"
self._segment_path = lambda: "generic-counter-interface"
self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/statistics/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PerfMgmt.Statistics.GenericCounterInterface, [], name, value)
class Templates(_Entity_):
"""
Template name
.. attribute:: template
A template instance
**type**\: list of :py:class:`Template <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Statistics.GenericCounterInterface.Templates.Template>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PerfMgmt.Statistics.GenericCounterInterface.Templates, self).__init__()
self.yang_name = "templates"
self.yang_parent_name = "generic-counter-interface"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("template", ("template", PerfMgmt.Statistics.GenericCounterInterface.Templates.Template))])
self._leafs = OrderedDict()
self.template = YList(self)
self._segment_path = lambda: "templates"
self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/statistics/generic-counter-interface/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PerfMgmt.Statistics.GenericCounterInterface.Templates, [], name, value)
class Template(_Entity_):
"""
A template instance
.. attribute:: template_name (key)
Template Name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: reg_exp_group
Enable instance filtering by regular expression
**type**\: str
**length:** 1..32
.. attribute:: history_persistent
Enable persistent history statistics
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: vrf_group
VRF group configured in regular expression to be applied
**type**\: str
**length:** 1..32
.. attribute:: sample_interval
Frequency of each sample in minutes
**type**\: int
**range:** 1..60
**units**\: minute
.. attribute:: sample_size
Number of samples to be taken
**type**\: int
**range:** 1..60
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PerfMgmt.Statistics.GenericCounterInterface.Templates.Template, self).__init__()
self.yang_name = "template"
self.yang_parent_name = "templates"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['template_name']
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
('reg_exp_group', (YLeaf(YType.str, 'reg-exp-group'), ['str'])),
('history_persistent', (YLeaf(YType.empty, 'history-persistent'), ['Empty'])),
('vrf_group', (YLeaf(YType.str, 'vrf-group'), ['str'])),
('sample_interval', (YLeaf(YType.uint32, 'sample-interval'), ['int'])),
('sample_size', (YLeaf(YType.uint32, 'sample-size'), ['int'])),
])
self.template_name = None
self.reg_exp_group = None
self.history_persistent = None
self.vrf_group = None
self.sample_interval = None
self.sample_size = None
self._segment_path = lambda: "template" + "[template-name='" + str(self.template_name) + "']"
self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/statistics/generic-counter-interface/templates/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PerfMgmt.Statistics.GenericCounterInterface.Templates.Template, ['template_name', 'reg_exp_group', 'history_persistent', 'vrf_group', 'sample_interval', 'sample_size'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
return meta._meta_table['PerfMgmt.Statistics.GenericCounterInterface.Templates.Template']['meta_info']
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
return meta._meta_table['PerfMgmt.Statistics.GenericCounterInterface.Templates']['meta_info']
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
return meta._meta_table['PerfMgmt.Statistics.GenericCounterInterface']['meta_info']
class ProcessNode(_Entity_):
"""
Node Process collection templates
.. attribute:: templates
Template name
**type**\: :py:class:`Templates <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Statistics.ProcessNode.Templates>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PerfMgmt.Statistics.ProcessNode, self).__init__()
self.yang_name = "process-node"
self.yang_parent_name = "statistics"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("templates", ("templates", PerfMgmt.Statistics.ProcessNode.Templates))])
self._leafs = OrderedDict()
self.templates = PerfMgmt.Statistics.ProcessNode.Templates()
self.templates.parent = self
self._children_name_map["templates"] = "templates"
self._segment_path = lambda: "process-node"
self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/statistics/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PerfMgmt.Statistics.ProcessNode, [], name, value)
class Templates(_Entity_):
"""
Template name
.. attribute:: template
A template instance
**type**\: list of :py:class:`Template <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Statistics.ProcessNode.Templates.Template>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PerfMgmt.Statistics.ProcessNode.Templates, self).__init__()
self.yang_name = "templates"
self.yang_parent_name = "process-node"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("template", ("template", PerfMgmt.Statistics.ProcessNode.Templates.Template))])
self._leafs = OrderedDict()
self.template = YList(self)
self._segment_path = lambda: "templates"
self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/statistics/process-node/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PerfMgmt.Statistics.ProcessNode.Templates, [], name, value)
class Template(_Entity_):
"""
A template instance
.. attribute:: template_name (key)
Template Name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: reg_exp_group
Enable instance filtering by regular expression
**type**\: str
**length:** 1..32
.. attribute:: history_persistent
Enable persistent history statistics
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: vrf_group
VRF group configured in regular expression to be applied
**type**\: str
**length:** 1..32
.. attribute:: sample_interval
Frequency of each sample in minutes
**type**\: int
**range:** 1..60
**units**\: minute
.. attribute:: sample_size
Number of samples to be taken
**type**\: int
**range:** 1..60
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PerfMgmt.Statistics.ProcessNode.Templates.Template, self).__init__()
self.yang_name = "template"
self.yang_parent_name = "templates"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['template_name']
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
('reg_exp_group', (YLeaf(YType.str, 'reg-exp-group'), ['str'])),
('history_persistent', (YLeaf(YType.empty, 'history-persistent'), ['Empty'])),
('vrf_group', (YLeaf(YType.str, 'vrf-group'), ['str'])),
('sample_interval', (YLeaf(YType.uint32, 'sample-interval'), ['int'])),
('sample_size', (YLeaf(YType.uint32, 'sample-size'), ['int'])),
])
self.template_name = None
self.reg_exp_group = None
self.history_persistent = None
self.vrf_group = None
self.sample_interval = None
self.sample_size = None
self._segment_path = lambda: "template" + "[template-name='" + str(self.template_name) + "']"
self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/statistics/process-node/templates/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PerfMgmt.Statistics.ProcessNode.Templates.Template, ['template_name', 'reg_exp_group', 'history_persistent', 'vrf_group', 'sample_interval', 'sample_size'], name, value)
@staticmethod
def _meta_info():
    # Lazy import keeps the large generated meta table off the module import path.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
    return meta._meta_table['PerfMgmt.Statistics.ProcessNode.Templates.Template']['meta_info']
@staticmethod
def _meta_info():
    # Lazy import keeps the large generated meta table off the module import path.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
    return meta._meta_table['PerfMgmt.Statistics.ProcessNode.Templates']['meta_info']
@staticmethod
def _meta_info():
    # Lazy import keeps the large generated meta table off the module import path.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
    return meta._meta_table['PerfMgmt.Statistics.ProcessNode']['meta_info']
class BasicCounterInterface(_Entity_):
    """
    Interface BasicCounter collection templates
    .. attribute:: templates
    Template name
    **type**\: :py:class:`Templates <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Statistics.BasicCounterInterface.Templates>`
    """
    # YANG module prefix / revision this binding was generated from.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'
    def __init__(self):
        # Py3 takes the zero-arg super(); the explicit two-arg form is kept for Py2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Statistics.BasicCounterInterface, self).__init__()
        self.yang_name = "basic-counter-interface"
        self.yang_parent_name = "statistics"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # Register the single child container class for YANG tree navigation.
        self._child_classes = OrderedDict([("templates", ("templates", PerfMgmt.Statistics.BasicCounterInterface.Templates))])
        self._leafs = OrderedDict()
        self.templates = PerfMgmt.Statistics.BasicCounterInterface.Templates()
        self.templates.parent = self
        self._children_name_map["templates"] = "templates"
        self._segment_path = lambda: "basic-counter-interface"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/statistics/%s" % self._segment_path()
        # Must remain last: freezing routes later attribute sets through _perform_setattr.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Container has no settable leafs, hence the empty leaf-name list.
        self._perform_setattr(PerfMgmt.Statistics.BasicCounterInterface, [], name, value)
    class Templates(_Entity_):
        """
        Template name
        .. attribute:: template
        A template instance
        **type**\: list of :py:class:`Template <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Statistics.BasicCounterInterface.Templates.Template>`
        """
        _prefix = 'manageability-perfmgmt-cfg'
        _revision = '2017-09-07'
        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PerfMgmt.Statistics.BasicCounterInterface.Templates, self).__init__()
            self.yang_name = "templates"
            self.yang_parent_name = "basic-counter-interface"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("template", ("template", PerfMgmt.Statistics.BasicCounterInterface.Templates.Template))])
            self._leafs = OrderedDict()
            # YANG list node: holds zero or more Template entries.
            self.template = YList(self)
            self._segment_path = lambda: "templates"
            self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/statistics/basic-counter-interface/%s" % self._segment_path()
            self._is_frozen = True
        def __setattr__(self, name, value):
            self._perform_setattr(PerfMgmt.Statistics.BasicCounterInterface.Templates, [], name, value)
        class Template(_Entity_):
            """
            A template instance
            .. attribute:: template_name (key)
            Template Name
            **type**\: str
            **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
            .. attribute:: reg_exp_group
            Enable instance filtering by regular expression
            **type**\: str
            **length:** 1..32
            .. attribute:: history_persistent
            Enable persistent history statistics
            **type**\: :py:class:`Empty<ydk.types.Empty>`
            .. attribute:: vrf_group
            VRF group configured in regular expression to be applied
            **type**\: str
            **length:** 1..32
            .. attribute:: sample_interval
            Frequency of each sample in minutes
            **type**\: int
            **range:** 1..60
            **units**\: minute
            .. attribute:: sample_size
            Number of samples to be taken
            **type**\: int
            **range:** 1..60
            """
            _prefix = 'manageability-perfmgmt-cfg'
            _revision = '2017-09-07'
            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(PerfMgmt.Statistics.BasicCounterInterface.Templates.Template, self).__init__()
                self.yang_name = "template"
                self.yang_parent_name = "templates"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                # 'template_name' is the YANG list key for this entry.
                self.ylist_key_names = ['template_name']
                self._child_classes = OrderedDict([])
                # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
                self._leafs = OrderedDict([
                    ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
                    ('reg_exp_group', (YLeaf(YType.str, 'reg-exp-group'), ['str'])),
                    ('history_persistent', (YLeaf(YType.empty, 'history-persistent'), ['Empty'])),
                    ('vrf_group', (YLeaf(YType.str, 'vrf-group'), ['str'])),
                    ('sample_interval', (YLeaf(YType.uint32, 'sample-interval'), ['int'])),
                    ('sample_size', (YLeaf(YType.uint32, 'sample-size'), ['int'])),
                ])
                self.template_name = None
                self.reg_exp_group = None
                self.history_persistent = None
                self.vrf_group = None
                self.sample_interval = None
                self.sample_size = None
                # XPath segment embeds the list-key predicate built from template_name.
                self._segment_path = lambda: "template" + "[template-name='" + str(self.template_name) + "']"
                self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/statistics/basic-counter-interface/templates/%s" % self._segment_path()
                # Must remain last: freezing routes later attribute sets through _perform_setattr.
                self._is_frozen = True
            def __setattr__(self, name, value):
                # Once frozen, only the listed YANG leafs may be assigned.
                self._perform_setattr(PerfMgmt.Statistics.BasicCounterInterface.Templates.Template, ['template_name', 'reg_exp_group', 'history_persistent', 'vrf_group', 'sample_interval', 'sample_size'], name, value)
            @staticmethod
            def _meta_info():
                # Lazy import keeps the large generated meta table off the module import path.
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
                return meta._meta_table['PerfMgmt.Statistics.BasicCounterInterface.Templates.Template']['meta_info']
        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
            return meta._meta_table['PerfMgmt.Statistics.BasicCounterInterface.Templates']['meta_info']
    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Statistics.BasicCounterInterface']['meta_info']
class Ospfv3Protocol(_Entity_):
    """
    OSPF v3 Protocol collection templates
    .. attribute:: templates
    Template name
    **type**\: :py:class:`Templates <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Statistics.Ospfv3Protocol.Templates>`
    """
    # YANG module prefix / revision this binding was generated from.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'
    def __init__(self):
        # Py3 takes the zero-arg super(); the explicit two-arg form is kept for Py2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Statistics.Ospfv3Protocol, self).__init__()
        self.yang_name = "ospfv3-protocol"
        self.yang_parent_name = "statistics"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # Register the single child container class for YANG tree navigation.
        self._child_classes = OrderedDict([("templates", ("templates", PerfMgmt.Statistics.Ospfv3Protocol.Templates))])
        self._leafs = OrderedDict()
        self.templates = PerfMgmt.Statistics.Ospfv3Protocol.Templates()
        self.templates.parent = self
        self._children_name_map["templates"] = "templates"
        self._segment_path = lambda: "ospfv3-protocol"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/statistics/%s" % self._segment_path()
        # Must remain last: freezing routes later attribute sets through _perform_setattr.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Container has no settable leafs, hence the empty leaf-name list.
        self._perform_setattr(PerfMgmt.Statistics.Ospfv3Protocol, [], name, value)
    class Templates(_Entity_):
        """
        Template name
        .. attribute:: template
        A template instance
        **type**\: list of :py:class:`Template <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Statistics.Ospfv3Protocol.Templates.Template>`
        """
        _prefix = 'manageability-perfmgmt-cfg'
        _revision = '2017-09-07'
        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PerfMgmt.Statistics.Ospfv3Protocol.Templates, self).__init__()
            self.yang_name = "templates"
            self.yang_parent_name = "ospfv3-protocol"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("template", ("template", PerfMgmt.Statistics.Ospfv3Protocol.Templates.Template))])
            self._leafs = OrderedDict()
            # YANG list node: holds zero or more Template entries.
            self.template = YList(self)
            self._segment_path = lambda: "templates"
            self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/statistics/ospfv3-protocol/%s" % self._segment_path()
            self._is_frozen = True
        def __setattr__(self, name, value):
            self._perform_setattr(PerfMgmt.Statistics.Ospfv3Protocol.Templates, [], name, value)
        class Template(_Entity_):
            """
            A template instance
            .. attribute:: template_name (key)
            Template Name
            **type**\: str
            **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
            .. attribute:: reg_exp_group
            Enable instance filtering by regular expression
            **type**\: str
            **length:** 1..32
            .. attribute:: history_persistent
            Enable persistent history statistics
            **type**\: :py:class:`Empty<ydk.types.Empty>`
            .. attribute:: vrf_group
            VRF group configured in regular expression to be applied
            **type**\: str
            **length:** 1..32
            .. attribute:: sample_interval
            Frequency of each sample in minutes
            **type**\: int
            **range:** 1..60
            **units**\: minute
            .. attribute:: sample_size
            Number of samples to be taken
            **type**\: int
            **range:** 1..60
            """
            _prefix = 'manageability-perfmgmt-cfg'
            _revision = '2017-09-07'
            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(PerfMgmt.Statistics.Ospfv3Protocol.Templates.Template, self).__init__()
                self.yang_name = "template"
                self.yang_parent_name = "templates"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                # 'template_name' is the YANG list key for this entry.
                self.ylist_key_names = ['template_name']
                self._child_classes = OrderedDict([])
                # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
                self._leafs = OrderedDict([
                    ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
                    ('reg_exp_group', (YLeaf(YType.str, 'reg-exp-group'), ['str'])),
                    ('history_persistent', (YLeaf(YType.empty, 'history-persistent'), ['Empty'])),
                    ('vrf_group', (YLeaf(YType.str, 'vrf-group'), ['str'])),
                    ('sample_interval', (YLeaf(YType.uint32, 'sample-interval'), ['int'])),
                    ('sample_size', (YLeaf(YType.uint32, 'sample-size'), ['int'])),
                ])
                self.template_name = None
                self.reg_exp_group = None
                self.history_persistent = None
                self.vrf_group = None
                self.sample_interval = None
                self.sample_size = None
                # XPath segment embeds the list-key predicate built from template_name.
                self._segment_path = lambda: "template" + "[template-name='" + str(self.template_name) + "']"
                self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/statistics/ospfv3-protocol/templates/%s" % self._segment_path()
                # Must remain last: freezing routes later attribute sets through _perform_setattr.
                self._is_frozen = True
            def __setattr__(self, name, value):
                # Once frozen, only the listed YANG leafs may be assigned.
                self._perform_setattr(PerfMgmt.Statistics.Ospfv3Protocol.Templates.Template, ['template_name', 'reg_exp_group', 'history_persistent', 'vrf_group', 'sample_interval', 'sample_size'], name, value)
            @staticmethod
            def _meta_info():
                # Lazy import keeps the large generated meta table off the module import path.
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
                return meta._meta_table['PerfMgmt.Statistics.Ospfv3Protocol.Templates.Template']['meta_info']
        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
            return meta._meta_table['PerfMgmt.Statistics.Ospfv3Protocol.Templates']['meta_info']
    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Statistics.Ospfv3Protocol']['meta_info']
class CpuNode(_Entity_):
    """
    Node CPU collection templates
    .. attribute:: templates
    Template name
    **type**\: :py:class:`Templates <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Statistics.CpuNode.Templates>`
    """
    # YANG module prefix / revision this binding was generated from.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'
    def __init__(self):
        # Py3 takes the zero-arg super(); the explicit two-arg form is kept for Py2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Statistics.CpuNode, self).__init__()
        self.yang_name = "cpu-node"
        self.yang_parent_name = "statistics"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # Register the single child container class for YANG tree navigation.
        self._child_classes = OrderedDict([("templates", ("templates", PerfMgmt.Statistics.CpuNode.Templates))])
        self._leafs = OrderedDict()
        self.templates = PerfMgmt.Statistics.CpuNode.Templates()
        self.templates.parent = self
        self._children_name_map["templates"] = "templates"
        self._segment_path = lambda: "cpu-node"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/statistics/%s" % self._segment_path()
        # Must remain last: freezing routes later attribute sets through _perform_setattr.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Container has no settable leafs, hence the empty leaf-name list.
        self._perform_setattr(PerfMgmt.Statistics.CpuNode, [], name, value)
    class Templates(_Entity_):
        """
        Template name
        .. attribute:: template
        A template instance
        **type**\: list of :py:class:`Template <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Statistics.CpuNode.Templates.Template>`
        """
        _prefix = 'manageability-perfmgmt-cfg'
        _revision = '2017-09-07'
        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PerfMgmt.Statistics.CpuNode.Templates, self).__init__()
            self.yang_name = "templates"
            self.yang_parent_name = "cpu-node"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("template", ("template", PerfMgmt.Statistics.CpuNode.Templates.Template))])
            self._leafs = OrderedDict()
            # YANG list node: holds zero or more Template entries.
            self.template = YList(self)
            self._segment_path = lambda: "templates"
            self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/statistics/cpu-node/%s" % self._segment_path()
            self._is_frozen = True
        def __setattr__(self, name, value):
            self._perform_setattr(PerfMgmt.Statistics.CpuNode.Templates, [], name, value)
        class Template(_Entity_):
            """
            A template instance
            .. attribute:: template_name (key)
            Template Name
            **type**\: str
            **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
            .. attribute:: reg_exp_group
            Enable instance filtering by regular expression
            **type**\: str
            **length:** 1..32
            .. attribute:: history_persistent
            Enable persistent history statistics
            **type**\: :py:class:`Empty<ydk.types.Empty>`
            .. attribute:: vrf_group
            VRF group configured in regular expression to be applied
            **type**\: str
            **length:** 1..32
            .. attribute:: sample_interval
            Frequency of each sample in minutes
            **type**\: int
            **range:** 1..60
            **units**\: minute
            .. attribute:: sample_size
            Number of samples to be taken
            **type**\: int
            **range:** 1..60
            """
            _prefix = 'manageability-perfmgmt-cfg'
            _revision = '2017-09-07'
            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(PerfMgmt.Statistics.CpuNode.Templates.Template, self).__init__()
                self.yang_name = "template"
                self.yang_parent_name = "templates"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                # 'template_name' is the YANG list key for this entry.
                self.ylist_key_names = ['template_name']
                self._child_classes = OrderedDict([])
                # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
                self._leafs = OrderedDict([
                    ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
                    ('reg_exp_group', (YLeaf(YType.str, 'reg-exp-group'), ['str'])),
                    ('history_persistent', (YLeaf(YType.empty, 'history-persistent'), ['Empty'])),
                    ('vrf_group', (YLeaf(YType.str, 'vrf-group'), ['str'])),
                    ('sample_interval', (YLeaf(YType.uint32, 'sample-interval'), ['int'])),
                    ('sample_size', (YLeaf(YType.uint32, 'sample-size'), ['int'])),
                ])
                self.template_name = None
                self.reg_exp_group = None
                self.history_persistent = None
                self.vrf_group = None
                self.sample_interval = None
                self.sample_size = None
                # XPath segment embeds the list-key predicate built from template_name.
                self._segment_path = lambda: "template" + "[template-name='" + str(self.template_name) + "']"
                self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/statistics/cpu-node/templates/%s" % self._segment_path()
                # Must remain last: freezing routes later attribute sets through _perform_setattr.
                self._is_frozen = True
            def __setattr__(self, name, value):
                # Once frozen, only the listed YANG leafs may be assigned.
                self._perform_setattr(PerfMgmt.Statistics.CpuNode.Templates.Template, ['template_name', 'reg_exp_group', 'history_persistent', 'vrf_group', 'sample_interval', 'sample_size'], name, value)
            @staticmethod
            def _meta_info():
                # Lazy import keeps the large generated meta table off the module import path.
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
                return meta._meta_table['PerfMgmt.Statistics.CpuNode.Templates.Template']['meta_info']
        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
            return meta._meta_table['PerfMgmt.Statistics.CpuNode.Templates']['meta_info']
    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Statistics.CpuNode']['meta_info']
class DataRateInterface(_Entity_):
    """
    Interface DataRate collection templates
    .. attribute:: templates
    Template name
    **type**\: :py:class:`Templates <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Statistics.DataRateInterface.Templates>`
    """
    # YANG module prefix / revision this binding was generated from.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'
    def __init__(self):
        # Py3 takes the zero-arg super(); the explicit two-arg form is kept for Py2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Statistics.DataRateInterface, self).__init__()
        self.yang_name = "data-rate-interface"
        self.yang_parent_name = "statistics"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # Register the single child container class for YANG tree navigation.
        self._child_classes = OrderedDict([("templates", ("templates", PerfMgmt.Statistics.DataRateInterface.Templates))])
        self._leafs = OrderedDict()
        self.templates = PerfMgmt.Statistics.DataRateInterface.Templates()
        self.templates.parent = self
        self._children_name_map["templates"] = "templates"
        self._segment_path = lambda: "data-rate-interface"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/statistics/%s" % self._segment_path()
        # Must remain last: freezing routes later attribute sets through _perform_setattr.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Container has no settable leafs, hence the empty leaf-name list.
        self._perform_setattr(PerfMgmt.Statistics.DataRateInterface, [], name, value)
    class Templates(_Entity_):
        """
        Template name
        .. attribute:: template
        A template instance
        **type**\: list of :py:class:`Template <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Statistics.DataRateInterface.Templates.Template>`
        """
        _prefix = 'manageability-perfmgmt-cfg'
        _revision = '2017-09-07'
        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PerfMgmt.Statistics.DataRateInterface.Templates, self).__init__()
            self.yang_name = "templates"
            self.yang_parent_name = "data-rate-interface"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("template", ("template", PerfMgmt.Statistics.DataRateInterface.Templates.Template))])
            self._leafs = OrderedDict()
            # YANG list node: holds zero or more Template entries.
            self.template = YList(self)
            self._segment_path = lambda: "templates"
            self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/statistics/data-rate-interface/%s" % self._segment_path()
            self._is_frozen = True
        def __setattr__(self, name, value):
            self._perform_setattr(PerfMgmt.Statistics.DataRateInterface.Templates, [], name, value)
        class Template(_Entity_):
            """
            A template instance
            .. attribute:: template_name (key)
            Template Name
            **type**\: str
            **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
            .. attribute:: reg_exp_group
            Enable instance filtering by regular expression
            **type**\: str
            **length:** 1..32
            .. attribute:: history_persistent
            Enable persistent history statistics
            **type**\: :py:class:`Empty<ydk.types.Empty>`
            .. attribute:: vrf_group
            VRF group configured in regular expression to be applied
            **type**\: str
            **length:** 1..32
            .. attribute:: sample_interval
            Frequency of each sample in minutes
            **type**\: int
            **range:** 1..60
            **units**\: minute
            .. attribute:: sample_size
            Number of samples to be taken
            **type**\: int
            **range:** 1..60
            """
            _prefix = 'manageability-perfmgmt-cfg'
            _revision = '2017-09-07'
            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(PerfMgmt.Statistics.DataRateInterface.Templates.Template, self).__init__()
                self.yang_name = "template"
                self.yang_parent_name = "templates"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                # 'template_name' is the YANG list key for this entry.
                self.ylist_key_names = ['template_name']
                self._child_classes = OrderedDict([])
                # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
                self._leafs = OrderedDict([
                    ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
                    ('reg_exp_group', (YLeaf(YType.str, 'reg-exp-group'), ['str'])),
                    ('history_persistent', (YLeaf(YType.empty, 'history-persistent'), ['Empty'])),
                    ('vrf_group', (YLeaf(YType.str, 'vrf-group'), ['str'])),
                    ('sample_interval', (YLeaf(YType.uint32, 'sample-interval'), ['int'])),
                    ('sample_size', (YLeaf(YType.uint32, 'sample-size'), ['int'])),
                ])
                self.template_name = None
                self.reg_exp_group = None
                self.history_persistent = None
                self.vrf_group = None
                self.sample_interval = None
                self.sample_size = None
                # XPath segment embeds the list-key predicate built from template_name.
                self._segment_path = lambda: "template" + "[template-name='" + str(self.template_name) + "']"
                self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/statistics/data-rate-interface/templates/%s" % self._segment_path()
                # Must remain last: freezing routes later attribute sets through _perform_setattr.
                self._is_frozen = True
            def __setattr__(self, name, value):
                # Once frozen, only the listed YANG leafs may be assigned.
                self._perform_setattr(PerfMgmt.Statistics.DataRateInterface.Templates.Template, ['template_name', 'reg_exp_group', 'history_persistent', 'vrf_group', 'sample_interval', 'sample_size'], name, value)
            @staticmethod
            def _meta_info():
                # Lazy import keeps the large generated meta table off the module import path.
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
                return meta._meta_table['PerfMgmt.Statistics.DataRateInterface.Templates.Template']['meta_info']
        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
            return meta._meta_table['PerfMgmt.Statistics.DataRateInterface.Templates']['meta_info']
    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Statistics.DataRateInterface']['meta_info']
class MemoryNode(_Entity_):
    """
    Node Memory collection templates
    .. attribute:: templates
    Template name
    **type**\: :py:class:`Templates <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Statistics.MemoryNode.Templates>`
    """
    # YANG module prefix / revision this binding was generated from.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'
    def __init__(self):
        # Py3 takes the zero-arg super(); the explicit two-arg form is kept for Py2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Statistics.MemoryNode, self).__init__()
        self.yang_name = "memory-node"
        self.yang_parent_name = "statistics"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # Register the single child container class for YANG tree navigation.
        self._child_classes = OrderedDict([("templates", ("templates", PerfMgmt.Statistics.MemoryNode.Templates))])
        self._leafs = OrderedDict()
        self.templates = PerfMgmt.Statistics.MemoryNode.Templates()
        self.templates.parent = self
        self._children_name_map["templates"] = "templates"
        self._segment_path = lambda: "memory-node"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/statistics/%s" % self._segment_path()
        # Must remain last: freezing routes later attribute sets through _perform_setattr.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Container has no settable leafs, hence the empty leaf-name list.
        self._perform_setattr(PerfMgmt.Statistics.MemoryNode, [], name, value)
    class Templates(_Entity_):
        """
        Template name
        .. attribute:: template
        A template instance
        **type**\: list of :py:class:`Template <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Statistics.MemoryNode.Templates.Template>`
        """
        _prefix = 'manageability-perfmgmt-cfg'
        _revision = '2017-09-07'
        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PerfMgmt.Statistics.MemoryNode.Templates, self).__init__()
            self.yang_name = "templates"
            self.yang_parent_name = "memory-node"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("template", ("template", PerfMgmt.Statistics.MemoryNode.Templates.Template))])
            self._leafs = OrderedDict()
            # YANG list node: holds zero or more Template entries.
            self.template = YList(self)
            self._segment_path = lambda: "templates"
            self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/statistics/memory-node/%s" % self._segment_path()
            self._is_frozen = True
        def __setattr__(self, name, value):
            self._perform_setattr(PerfMgmt.Statistics.MemoryNode.Templates, [], name, value)
        class Template(_Entity_):
            """
            A template instance
            .. attribute:: template_name (key)
            Template Name
            **type**\: str
            **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
            .. attribute:: reg_exp_group
            Enable instance filtering by regular expression
            **type**\: str
            **length:** 1..32
            .. attribute:: history_persistent
            Enable persistent history statistics
            **type**\: :py:class:`Empty<ydk.types.Empty>`
            .. attribute:: vrf_group
            VRF group configured in regular expression to be applied
            **type**\: str
            **length:** 1..32
            .. attribute:: sample_interval
            Frequency of each sample in minutes
            **type**\: int
            **range:** 1..60
            **units**\: minute
            .. attribute:: sample_size
            Number of samples to be taken
            **type**\: int
            **range:** 1..60
            """
            _prefix = 'manageability-perfmgmt-cfg'
            _revision = '2017-09-07'
            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(PerfMgmt.Statistics.MemoryNode.Templates.Template, self).__init__()
                self.yang_name = "template"
                self.yang_parent_name = "templates"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                # 'template_name' is the YANG list key for this entry.
                self.ylist_key_names = ['template_name']
                self._child_classes = OrderedDict([])
                # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
                self._leafs = OrderedDict([
                    ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
                    ('reg_exp_group', (YLeaf(YType.str, 'reg-exp-group'), ['str'])),
                    ('history_persistent', (YLeaf(YType.empty, 'history-persistent'), ['Empty'])),
                    ('vrf_group', (YLeaf(YType.str, 'vrf-group'), ['str'])),
                    ('sample_interval', (YLeaf(YType.uint32, 'sample-interval'), ['int'])),
                    ('sample_size', (YLeaf(YType.uint32, 'sample-size'), ['int'])),
                ])
                self.template_name = None
                self.reg_exp_group = None
                self.history_persistent = None
                self.vrf_group = None
                self.sample_interval = None
                self.sample_size = None
                # XPath segment embeds the list-key predicate built from template_name.
                self._segment_path = lambda: "template" + "[template-name='" + str(self.template_name) + "']"
                self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/statistics/memory-node/templates/%s" % self._segment_path()
                # Must remain last: freezing routes later attribute sets through _perform_setattr.
                self._is_frozen = True
            def __setattr__(self, name, value):
                # Once frozen, only the listed YANG leafs may be assigned.
                self._perform_setattr(PerfMgmt.Statistics.MemoryNode.Templates.Template, ['template_name', 'reg_exp_group', 'history_persistent', 'vrf_group', 'sample_interval', 'sample_size'], name, value)
            @staticmethod
            def _meta_info():
                # Lazy import keeps the large generated meta table off the module import path.
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
                return meta._meta_table['PerfMgmt.Statistics.MemoryNode.Templates.Template']['meta_info']
        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
            return meta._meta_table['PerfMgmt.Statistics.MemoryNode.Templates']['meta_info']
    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Statistics.MemoryNode']['meta_info']
class LdpMpls(_Entity_):
    """
    MPLS LDP collection templates
    .. attribute:: templates
    Template name
    **type**\: :py:class:`Templates <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Statistics.LdpMpls.Templates>`
    """
    # YANG module prefix / revision this binding was generated from.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'
    def __init__(self):
        # Py3 takes the zero-arg super(); the explicit two-arg form is kept for Py2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Statistics.LdpMpls, self).__init__()
        self.yang_name = "ldp-mpls"
        self.yang_parent_name = "statistics"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # Register the single child container class for YANG tree navigation.
        self._child_classes = OrderedDict([("templates", ("templates", PerfMgmt.Statistics.LdpMpls.Templates))])
        self._leafs = OrderedDict()
        self.templates = PerfMgmt.Statistics.LdpMpls.Templates()
        self.templates.parent = self
        self._children_name_map["templates"] = "templates"
        self._segment_path = lambda: "ldp-mpls"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/statistics/%s" % self._segment_path()
        # Must remain last: freezing routes later attribute sets through _perform_setattr.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Container has no settable leafs, hence the empty leaf-name list.
        self._perform_setattr(PerfMgmt.Statistics.LdpMpls, [], name, value)
    class Templates(_Entity_):
        """
        Template name
        .. attribute:: template
        A template instance
        **type**\: list of :py:class:`Template <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Statistics.LdpMpls.Templates.Template>`
        """
        _prefix = 'manageability-perfmgmt-cfg'
        _revision = '2017-09-07'
        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PerfMgmt.Statistics.LdpMpls.Templates, self).__init__()
            self.yang_name = "templates"
            self.yang_parent_name = "ldp-mpls"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("template", ("template", PerfMgmt.Statistics.LdpMpls.Templates.Template))])
            self._leafs = OrderedDict()
            # YANG list node: holds zero or more Template entries.
            self.template = YList(self)
            self._segment_path = lambda: "templates"
            self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/statistics/ldp-mpls/%s" % self._segment_path()
            self._is_frozen = True
        def __setattr__(self, name, value):
            self._perform_setattr(PerfMgmt.Statistics.LdpMpls.Templates, [], name, value)
        class Template(_Entity_):
            """
            A template instance
            .. attribute:: template_name (key)
            Template Name
            **type**\: str
            **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
            .. attribute:: reg_exp_group
            Enable instance filtering by regular expression
            **type**\: str
            **length:** 1..32
            .. attribute:: history_persistent
            Enable persistent history statistics
            **type**\: :py:class:`Empty<ydk.types.Empty>`
            .. attribute:: vrf_group
            VRF group configured in regular expression to be applied
            **type**\: str
            **length:** 1..32
            .. attribute:: sample_interval
            Frequency of each sample in minutes
            **type**\: int
            **range:** 1..60
            **units**\: minute
            .. attribute:: sample_size
            Number of samples to be taken
            **type**\: int
            **range:** 1..60
            """
            _prefix = 'manageability-perfmgmt-cfg'
            _revision = '2017-09-07'
            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(PerfMgmt.Statistics.LdpMpls.Templates.Template, self).__init__()
                self.yang_name = "template"
                self.yang_parent_name = "templates"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                # 'template_name' is the YANG list key for this entry.
                self.ylist_key_names = ['template_name']
                self._child_classes = OrderedDict([])
                # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
                self._leafs = OrderedDict([
                    ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
                    ('reg_exp_group', (YLeaf(YType.str, 'reg-exp-group'), ['str'])),
                    ('history_persistent', (YLeaf(YType.empty, 'history-persistent'), ['Empty'])),
                    ('vrf_group', (YLeaf(YType.str, 'vrf-group'), ['str'])),
                    ('sample_interval', (YLeaf(YType.uint32, 'sample-interval'), ['int'])),
                    ('sample_size', (YLeaf(YType.uint32, 'sample-size'), ['int'])),
                ])
                self.template_name = None
                self.reg_exp_group = None
                self.history_persistent = None
                self.vrf_group = None
                self.sample_interval = None
                self.sample_size = None
                # XPath segment embeds the list-key predicate built from template_name.
                self._segment_path = lambda: "template" + "[template-name='" + str(self.template_name) + "']"
                self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/statistics/ldp-mpls/templates/%s" % self._segment_path()
                # Must remain last: freezing routes later attribute sets through _perform_setattr.
                self._is_frozen = True
            def __setattr__(self, name, value):
                # Once frozen, only the listed YANG leafs may be assigned.
                self._perform_setattr(PerfMgmt.Statistics.LdpMpls.Templates.Template, ['template_name', 'reg_exp_group', 'history_persistent', 'vrf_group', 'sample_interval', 'sample_size'], name, value)
            @staticmethod
            def _meta_info():
                # Lazy import keeps the large generated meta table off the module import path.
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
                return meta._meta_table['PerfMgmt.Statistics.LdpMpls.Templates.Template']['meta_info']
        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
            return meta._meta_table['PerfMgmt.Statistics.LdpMpls.Templates']['meta_info']
    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Statistics.LdpMpls']['meta_info']
class Bgp(_Entity_):
"""
BGP collection templates
.. attribute:: templates
Template name
**type**\: :py:class:`Templates <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Statistics.Bgp.Templates>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
    # Py3 takes the zero-arg super(); the explicit two-arg form is kept for Py2.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PerfMgmt.Statistics.Bgp, self).__init__()
    self.yang_name = "bgp"
    self.yang_parent_name = "statistics"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # Register the single child container class for YANG tree navigation.
    self._child_classes = OrderedDict([("templates", ("templates", PerfMgmt.Statistics.Bgp.Templates))])
    self._leafs = OrderedDict()
    self.templates = PerfMgmt.Statistics.Bgp.Templates()
    self.templates.parent = self
    self._children_name_map["templates"] = "templates"
    self._segment_path = lambda: "bgp"
    self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/statistics/%s" % self._segment_path()
    # Must remain last: freezing routes later attribute sets through _perform_setattr.
    self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PerfMgmt.Statistics.Bgp, [], name, value)
class Templates(_Entity_):
"""
Template name
.. attribute:: template
A template instance
**type**\: list of :py:class:`Template <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Statistics.Bgp.Templates.Template>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PerfMgmt.Statistics.Bgp.Templates, self).__init__()
self.yang_name = "templates"
self.yang_parent_name = "bgp"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("template", ("template", PerfMgmt.Statistics.Bgp.Templates.Template))])
self._leafs = OrderedDict()
self.template = YList(self)
self._segment_path = lambda: "templates"
self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/statistics/bgp/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PerfMgmt.Statistics.Bgp.Templates, [], name, value)
class Template(_Entity_):
"""
A template instance
.. attribute:: template_name (key)
Template Name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: reg_exp_group
Enable instance filtering by regular expression
**type**\: str
**length:** 1..32
.. attribute:: history_persistent
Enable persistent history statistics
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: vrf_group
VRF group configured in regular expression to be applied
**type**\: str
**length:** 1..32
.. attribute:: sample_interval
Frequency of each sample in minutes
**type**\: int
**range:** 1..60
**units**\: minute
.. attribute:: sample_size
Number of samples to be taken
**type**\: int
**range:** 1..60
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PerfMgmt.Statistics.Bgp.Templates.Template, self).__init__()
self.yang_name = "template"
self.yang_parent_name = "templates"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['template_name']
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
('reg_exp_group', (YLeaf(YType.str, 'reg-exp-group'), ['str'])),
('history_persistent', (YLeaf(YType.empty, 'history-persistent'), ['Empty'])),
('vrf_group', (YLeaf(YType.str, 'vrf-group'), ['str'])),
('sample_interval', (YLeaf(YType.uint32, 'sample-interval'), ['int'])),
('sample_size', (YLeaf(YType.uint32, 'sample-size'), ['int'])),
])
self.template_name = None
self.reg_exp_group = None
self.history_persistent = None
self.vrf_group = None
self.sample_interval = None
self.sample_size = None
self._segment_path = lambda: "template" + "[template-name='" + str(self.template_name) + "']"
self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/statistics/bgp/templates/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PerfMgmt.Statistics.Bgp.Templates.Template, ['template_name', 'reg_exp_group', 'history_persistent', 'vrf_group', 'sample_interval', 'sample_size'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
return meta._meta_table['PerfMgmt.Statistics.Bgp.Templates.Template']['meta_info']
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
return meta._meta_table['PerfMgmt.Statistics.Bgp.Templates']['meta_info']
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
return meta._meta_table['PerfMgmt.Statistics.Bgp']['meta_info']
class Ospfv2Protocol(_Entity_):
    """
    OSPF v2 Protocol collection templates

    .. attribute:: templates

        Template name

        **type**\: :py:class:`Templates <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Statistics.Ospfv2Protocol.Templates>`
    """

    # YANG module metadata for this generated binding.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() call (generated code).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Statistics.Ospfv2Protocol, self).__init__()
        self.yang_name = "ospfv2-protocol"
        self.yang_parent_name = "statistics"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # Single child container: the "templates" node.
        self._child_classes = OrderedDict([("templates", ("templates", PerfMgmt.Statistics.Ospfv2Protocol.Templates))])
        self._leafs = OrderedDict()
        self.templates = PerfMgmt.Statistics.Ospfv2Protocol.Templates()
        self.templates.parent = self
        self._children_name_map["templates"] = "templates"
        self._segment_path = lambda: "ospfv2-protocol"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/statistics/%s" % self._segment_path()
        # Freeze last: all assignments above must precede this flag, since
        # __setattr__ validates writes once the instance is frozen.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # No settable leafs on this container (empty leaf list).
        self._perform_setattr(PerfMgmt.Statistics.Ospfv2Protocol, [], name, value)

    class Templates(_Entity_):
        """
        Template name

        .. attribute:: template

            A template instance

            **type**\: list of :py:class:`Template <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Statistics.Ospfv2Protocol.Templates.Template>`
        """

        _prefix = 'manageability-perfmgmt-cfg'
        _revision = '2017-09-07'

        def __init__(self):
            # Python 2/3 compatible super() call (generated code).
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PerfMgmt.Statistics.Ospfv2Protocol.Templates, self).__init__()
            self.yang_name = "templates"
            self.yang_parent_name = "ospfv2-protocol"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("template", ("template", PerfMgmt.Statistics.Ospfv2Protocol.Templates.Template))])
            self._leafs = OrderedDict()
            # YANG list of Template entries.
            self.template = YList(self)
            self._segment_path = lambda: "templates"
            self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/statistics/ospfv2-protocol/%s" % self._segment_path()
            # Freeze last (see class-level note on __setattr__ validation).
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(PerfMgmt.Statistics.Ospfv2Protocol.Templates, [], name, value)

        class Template(_Entity_):
            """
            A template instance

            .. attribute:: template_name (key)

                Template Name

                **type**\: str

                **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+

            .. attribute:: reg_exp_group

                Enable instance filtering by regular expression

                **type**\: str

                **length:** 1..32

            .. attribute:: history_persistent

                Enable persistent history statistics

                **type**\: :py:class:`Empty<ydk.types.Empty>`

            .. attribute:: vrf_group

                VRF group configured in regular expression to be applied

                **type**\: str

                **length:** 1..32

            .. attribute:: sample_interval

                Frequency of each sample in minutes

                **type**\: int

                **range:** 1..60

                **units**\: minute

            .. attribute:: sample_size

                Number of samples to be taken

                **type**\: int

                **range:** 1..60
            """

            _prefix = 'manageability-perfmgmt-cfg'
            _revision = '2017-09-07'

            def __init__(self):
                # Python 2/3 compatible super() call (generated code).
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(PerfMgmt.Statistics.Ospfv2Protocol.Templates.Template, self).__init__()
                self.yang_name = "template"
                self.yang_parent_name = "templates"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                # List key used to build the segment-path predicate below.
                self.ylist_key_names = ['template_name']
                self._child_classes = OrderedDict([])
                # Leaf map: python attr -> (YLeaf binding, accepted python types).
                self._leafs = OrderedDict([
                    ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
                    ('reg_exp_group', (YLeaf(YType.str, 'reg-exp-group'), ['str'])),
                    ('history_persistent', (YLeaf(YType.empty, 'history-persistent'), ['Empty'])),
                    ('vrf_group', (YLeaf(YType.str, 'vrf-group'), ['str'])),
                    ('sample_interval', (YLeaf(YType.uint32, 'sample-interval'), ['int'])),
                    ('sample_size', (YLeaf(YType.uint32, 'sample-size'), ['int'])),
                ])
                self.template_name = None
                self.reg_exp_group = None
                self.history_persistent = None
                self.vrf_group = None
                self.sample_interval = None
                self.sample_size = None
                # Segment path embeds the list key value at call time.
                self._segment_path = lambda: "template" + "[template-name='" + str(self.template_name) + "']"
                self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/statistics/ospfv2-protocol/templates/%s" % self._segment_path()
                # Freeze last (see class-level note on __setattr__ validation).
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(PerfMgmt.Statistics.Ospfv2Protocol.Templates.Template, ['template_name', 'reg_exp_group', 'history_persistent', 'vrf_group', 'sample_interval', 'sample_size'], name, value)

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
                return meta._meta_table['PerfMgmt.Statistics.Ospfv2Protocol.Templates.Template']['meta_info']

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
            return meta._meta_table['PerfMgmt.Statistics.Ospfv2Protocol.Templates']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Statistics.Ospfv2Protocol']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-information entry for this class."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
    entry = meta._meta_table['PerfMgmt.Statistics']
    return entry['meta_info']
class Enable(_Entity_):
"""
Start data collection and/or threshold
monitoring
.. attribute:: threshold
Start threshold monitoring using a defined template
**type**\: :py:class:`Threshold <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Threshold>`
.. attribute:: statistics
Start periodic collection using a defined template
**type**\: :py:class:`Statistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Statistics>`
.. attribute:: monitor_enable
Start data collection for a monitored instance
**type**\: :py:class:`MonitorEnable <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
    """Construct the three child containers and freeze the instance."""
    # Python 2/3 compatible super() call (generated code).
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PerfMgmt.Enable, self).__init__()
    self.yang_name = "enable"
    self.yang_parent_name = "perf-mgmt"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # YANG child containers keyed by their YANG element names.
    self._child_classes = OrderedDict([("threshold", ("threshold", PerfMgmt.Enable.Threshold)), ("statistics", ("statistics", PerfMgmt.Enable.Statistics)), ("monitor-enable", ("monitor_enable", PerfMgmt.Enable.MonitorEnable))])
    self._leafs = OrderedDict()
    self.threshold = PerfMgmt.Enable.Threshold()
    self.threshold.parent = self
    self._children_name_map["threshold"] = "threshold"
    self.statistics = PerfMgmt.Enable.Statistics()
    self.statistics.parent = self
    self._children_name_map["statistics"] = "statistics"
    self.monitor_enable = PerfMgmt.Enable.MonitorEnable()
    self.monitor_enable.parent = self
    self._children_name_map["monitor_enable"] = "monitor-enable"
    self._segment_path = lambda: "enable"
    self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/%s" % self._segment_path()
    # Freeze last: all assignments above must precede this flag, since
    # __setattr__ validates writes once the instance is frozen.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Delegate to YDK's validating setter; this container has no settable leafs.
    self._perform_setattr(PerfMgmt.Enable, [], name, value)
class Threshold(_Entity_):
"""
Start threshold monitoring using a defined
template
.. attribute:: ospfv3_protocol
Threshold monitoring for OSPF v3 Protocol
**type**\: :py:class:`Ospfv3Protocol <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Threshold.Ospfv3Protocol>`
.. attribute:: bgp
Threshold monitoring for BGP
**type**\: :py:class:`Bgp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Threshold.Bgp>`
.. attribute:: data_rate_interface
Threshold monitoring for Interface data\-rates
**type**\: :py:class:`DataRateInterface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Threshold.DataRateInterface>`
.. attribute:: ospfv2_protocol
Threshold monitoring for OSPF v2 Protocol
**type**\: :py:class:`Ospfv2Protocol <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Threshold.Ospfv2Protocol>`
.. attribute:: memory_node
Threshold monitoring for memory
**type**\: :py:class:`MemoryNode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Threshold.MemoryNode>`
.. attribute:: generic_counter_interface
Threshold monitoring for Interface generic\-counters
**type**\: :py:class:`GenericCounterInterface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Threshold.GenericCounterInterface>`
.. attribute:: cpu_node
Threshold monitoring for CPU
**type**\: :py:class:`CpuNode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Threshold.CpuNode>`
.. attribute:: ldp_mpls
Threshold monitoring for LDP
**type**\: :py:class:`LdpMpls <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Threshold.LdpMpls>`
.. attribute:: process_node
Threshold monitoring for process
**type**\: :py:class:`ProcessNode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Threshold.ProcessNode>`
.. attribute:: basic_counter_interface
Threshold monitoring for Interface basic\-counters
**type**\: :py:class:`BasicCounterInterface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Threshold.BasicCounterInterface>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
    """Construct the ten per-resource threshold containers and freeze."""
    # Python 2/3 compatible super() call (generated code).
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PerfMgmt.Enable.Threshold, self).__init__()
    self.yang_name = "threshold"
    self.yang_parent_name = "enable"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # YANG child containers keyed by their YANG element names.
    self._child_classes = OrderedDict([("ospfv3-protocol", ("ospfv3_protocol", PerfMgmt.Enable.Threshold.Ospfv3Protocol)), ("bgp", ("bgp", PerfMgmt.Enable.Threshold.Bgp)), ("data-rate-interface", ("data_rate_interface", PerfMgmt.Enable.Threshold.DataRateInterface)), ("ospfv2-protocol", ("ospfv2_protocol", PerfMgmt.Enable.Threshold.Ospfv2Protocol)), ("memory-node", ("memory_node", PerfMgmt.Enable.Threshold.MemoryNode)), ("generic-counter-interface", ("generic_counter_interface", PerfMgmt.Enable.Threshold.GenericCounterInterface)), ("cpu-node", ("cpu_node", PerfMgmt.Enable.Threshold.CpuNode)), ("ldp-mpls", ("ldp_mpls", PerfMgmt.Enable.Threshold.LdpMpls)), ("process-node", ("process_node", PerfMgmt.Enable.Threshold.ProcessNode)), ("basic-counter-interface", ("basic_counter_interface", PerfMgmt.Enable.Threshold.BasicCounterInterface))])
    self._leafs = OrderedDict()
    # One eagerly-constructed child per monitored resource; each child's
    # parent pointer and YANG-name mapping are set immediately after it.
    self.ospfv3_protocol = PerfMgmt.Enable.Threshold.Ospfv3Protocol()
    self.ospfv3_protocol.parent = self
    self._children_name_map["ospfv3_protocol"] = "ospfv3-protocol"
    self.bgp = PerfMgmt.Enable.Threshold.Bgp()
    self.bgp.parent = self
    self._children_name_map["bgp"] = "bgp"
    self.data_rate_interface = PerfMgmt.Enable.Threshold.DataRateInterface()
    self.data_rate_interface.parent = self
    self._children_name_map["data_rate_interface"] = "data-rate-interface"
    self.ospfv2_protocol = PerfMgmt.Enable.Threshold.Ospfv2Protocol()
    self.ospfv2_protocol.parent = self
    self._children_name_map["ospfv2_protocol"] = "ospfv2-protocol"
    self.memory_node = PerfMgmt.Enable.Threshold.MemoryNode()
    self.memory_node.parent = self
    self._children_name_map["memory_node"] = "memory-node"
    self.generic_counter_interface = PerfMgmt.Enable.Threshold.GenericCounterInterface()
    self.generic_counter_interface.parent = self
    self._children_name_map["generic_counter_interface"] = "generic-counter-interface"
    self.cpu_node = PerfMgmt.Enable.Threshold.CpuNode()
    self.cpu_node.parent = self
    self._children_name_map["cpu_node"] = "cpu-node"
    self.ldp_mpls = PerfMgmt.Enable.Threshold.LdpMpls()
    self.ldp_mpls.parent = self
    self._children_name_map["ldp_mpls"] = "ldp-mpls"
    self.process_node = PerfMgmt.Enable.Threshold.ProcessNode()
    self.process_node.parent = self
    self._children_name_map["process_node"] = "process-node"
    self.basic_counter_interface = PerfMgmt.Enable.Threshold.BasicCounterInterface()
    self.basic_counter_interface.parent = self
    self._children_name_map["basic_counter_interface"] = "basic-counter-interface"
    self._segment_path = lambda: "threshold"
    self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/%s" % self._segment_path()
    # Freeze last: all assignments above must precede this flag, since
    # __setattr__ validates writes once the instance is frozen.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Delegate to YDK's validating setter; this container has no settable leafs.
    self._perform_setattr(PerfMgmt.Enable.Threshold, [], name, value)
class Ospfv3Protocol(_Entity_):
    """
    Threshold monitoring for OSPF v3 Protocol

    .. attribute:: template_name

        Template name

        **type**\: str
    """

    # YANG module metadata for this generated binding.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() call (generated code).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Enable.Threshold.Ospfv3Protocol, self).__init__()
        self.yang_name = "ospfv3-protocol"
        self.yang_parent_name = "threshold"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Single settable leaf: the threshold template to apply.
        self._leafs = OrderedDict([
            ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
        ])
        self.template_name = None
        self._segment_path = lambda: "ospfv3-protocol"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/threshold/%s" % self._segment_path()
        # Freeze last: assignments above must precede this flag, since
        # __setattr__ validates writes once the instance is frozen.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(PerfMgmt.Enable.Threshold.Ospfv3Protocol, ['template_name'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Enable.Threshold.Ospfv3Protocol']['meta_info']
class Bgp(_Entity_):
    """
    Threshold monitoring for BGP

    .. attribute:: template_name

        Template name

        **type**\: str
    """

    # YANG module metadata for this generated binding.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() call (generated code).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Enable.Threshold.Bgp, self).__init__()
        self.yang_name = "bgp"
        self.yang_parent_name = "threshold"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Single settable leaf: the threshold template to apply.
        self._leafs = OrderedDict([
            ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
        ])
        self.template_name = None
        self._segment_path = lambda: "bgp"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/threshold/%s" % self._segment_path()
        # Freeze last: assignments above must precede this flag, since
        # __setattr__ validates writes once the instance is frozen.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(PerfMgmt.Enable.Threshold.Bgp, ['template_name'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Enable.Threshold.Bgp']['meta_info']
class DataRateInterface(_Entity_):
    """
    Threshold monitoring for Interface data\-rates

    .. attribute:: template_name

        Template name

        **type**\: str
    """

    # YANG module metadata for this generated binding.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() call (generated code).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Enable.Threshold.DataRateInterface, self).__init__()
        self.yang_name = "data-rate-interface"
        self.yang_parent_name = "threshold"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Single settable leaf: the threshold template to apply.
        self._leafs = OrderedDict([
            ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
        ])
        self.template_name = None
        self._segment_path = lambda: "data-rate-interface"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/threshold/%s" % self._segment_path()
        # Freeze last: assignments above must precede this flag, since
        # __setattr__ validates writes once the instance is frozen.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(PerfMgmt.Enable.Threshold.DataRateInterface, ['template_name'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Enable.Threshold.DataRateInterface']['meta_info']
class Ospfv2Protocol(_Entity_):
    """
    Threshold monitoring for OSPF v2 Protocol

    .. attribute:: template_name

        Template name

        **type**\: str
    """

    # YANG module metadata for this generated binding.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() call (generated code).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Enable.Threshold.Ospfv2Protocol, self).__init__()
        self.yang_name = "ospfv2-protocol"
        self.yang_parent_name = "threshold"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Single settable leaf: the threshold template to apply.
        self._leafs = OrderedDict([
            ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
        ])
        self.template_name = None
        self._segment_path = lambda: "ospfv2-protocol"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/threshold/%s" % self._segment_path()
        # Freeze last: assignments above must precede this flag, since
        # __setattr__ validates writes once the instance is frozen.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(PerfMgmt.Enable.Threshold.Ospfv2Protocol, ['template_name'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Enable.Threshold.Ospfv2Protocol']['meta_info']
class MemoryNode(_Entity_):
    """
    Threshold monitoring for memory

    .. attribute:: nodes

        Node specification

        **type**\: :py:class:`Nodes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Threshold.MemoryNode.Nodes>`

    .. attribute:: node_all

        All the nodes

        **type**\: :py:class:`NodeAll <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Threshold.MemoryNode.NodeAll>`
    """

    # YANG module metadata for this generated binding.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() call (generated code).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Enable.Threshold.MemoryNode, self).__init__()
        self.yang_name = "memory-node"
        self.yang_parent_name = "threshold"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # Two child containers: per-node list wrapper and the all-nodes form.
        self._child_classes = OrderedDict([("nodes", ("nodes", PerfMgmt.Enable.Threshold.MemoryNode.Nodes)), ("node-all", ("node_all", PerfMgmt.Enable.Threshold.MemoryNode.NodeAll))])
        self._leafs = OrderedDict()
        self.nodes = PerfMgmt.Enable.Threshold.MemoryNode.Nodes()
        self.nodes.parent = self
        self._children_name_map["nodes"] = "nodes"
        self.node_all = PerfMgmt.Enable.Threshold.MemoryNode.NodeAll()
        self.node_all.parent = self
        self._children_name_map["node_all"] = "node-all"
        self._segment_path = lambda: "memory-node"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/threshold/%s" % self._segment_path()
        # Freeze last: all assignments above must precede this flag, since
        # __setattr__ validates writes once the instance is frozen.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(PerfMgmt.Enable.Threshold.MemoryNode, [], name, value)

    class Nodes(_Entity_):
        """
        Node specification

        .. attribute:: node

            Node instance

            **type**\: list of :py:class:`Node <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Threshold.MemoryNode.Nodes.Node>`
        """

        _prefix = 'manageability-perfmgmt-cfg'
        _revision = '2017-09-07'

        def __init__(self):
            # Python 2/3 compatible super() call (generated code).
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PerfMgmt.Enable.Threshold.MemoryNode.Nodes, self).__init__()
            self.yang_name = "nodes"
            self.yang_parent_name = "memory-node"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("node", ("node", PerfMgmt.Enable.Threshold.MemoryNode.Nodes.Node))])
            self._leafs = OrderedDict()
            # YANG list of Node entries.
            self.node = YList(self)
            self._segment_path = lambda: "nodes"
            self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/threshold/memory-node/%s" % self._segment_path()
            # Freeze last (see class-level note on __setattr__ validation).
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(PerfMgmt.Enable.Threshold.MemoryNode.Nodes, [], name, value)

        class Node(_Entity_):
            """
            Node instance

            .. attribute:: node_id (key)

                Node ID

                **type**\: str

                **pattern:** ([a\-zA\-Z0\-9\_]\*\\d+/){1,2}([a\-zA\-Z0\-9\_]\*\\d+)

            .. attribute:: template_name

                Template name

                **type**\: str
            """

            _prefix = 'manageability-perfmgmt-cfg'
            _revision = '2017-09-07'

            def __init__(self):
                # Python 2/3 compatible super() call (generated code).
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(PerfMgmt.Enable.Threshold.MemoryNode.Nodes.Node, self).__init__()
                self.yang_name = "node"
                self.yang_parent_name = "nodes"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                # List key used to build the segment-path predicate below.
                self.ylist_key_names = ['node_id']
                self._child_classes = OrderedDict([])
                # Leaf map: python attr -> (YLeaf binding, accepted python types).
                self._leafs = OrderedDict([
                    ('node_id', (YLeaf(YType.str, 'node-id'), ['str'])),
                    ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
                ])
                self.node_id = None
                self.template_name = None
                # Segment path embeds the list key value at call time.
                self._segment_path = lambda: "node" + "[node-id='" + str(self.node_id) + "']"
                self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/threshold/memory-node/nodes/%s" % self._segment_path()
                # Freeze last (see class-level note on __setattr__ validation).
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(PerfMgmt.Enable.Threshold.MemoryNode.Nodes.Node, ['node_id', 'template_name'], name, value)

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
                return meta._meta_table['PerfMgmt.Enable.Threshold.MemoryNode.Nodes.Node']['meta_info']

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
            return meta._meta_table['PerfMgmt.Enable.Threshold.MemoryNode.Nodes']['meta_info']

    class NodeAll(_Entity_):
        """
        All the nodes

        .. attribute:: template_name

            Template name

            **type**\: str
        """

        _prefix = 'manageability-perfmgmt-cfg'
        _revision = '2017-09-07'

        def __init__(self):
            # Python 2/3 compatible super() call (generated code).
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PerfMgmt.Enable.Threshold.MemoryNode.NodeAll, self).__init__()
            self.yang_name = "node-all"
            self.yang_parent_name = "memory-node"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            # Single settable leaf: the threshold template to apply.
            self._leafs = OrderedDict([
                ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
            ])
            self.template_name = None
            self._segment_path = lambda: "node-all"
            self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/threshold/memory-node/%s" % self._segment_path()
            # Freeze last (see class-level note on __setattr__ validation).
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(PerfMgmt.Enable.Threshold.MemoryNode.NodeAll, ['template_name'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
            return meta._meta_table['PerfMgmt.Enable.Threshold.MemoryNode.NodeAll']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Enable.Threshold.MemoryNode']['meta_info']
class GenericCounterInterface(_Entity_):
    """
    Threshold monitoring for Interface
    generic\-counters

    .. attribute:: template_name

        Template name

        **type**\: str
    """

    # YANG module metadata for this generated binding.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() call (generated code).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Enable.Threshold.GenericCounterInterface, self).__init__()
        self.yang_name = "generic-counter-interface"
        self.yang_parent_name = "threshold"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Single settable leaf: the threshold template to apply.
        self._leafs = OrderedDict([
            ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
        ])
        self.template_name = None
        self._segment_path = lambda: "generic-counter-interface"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/threshold/%s" % self._segment_path()
        # Freeze last: assignments above must precede this flag, since
        # __setattr__ validates writes once the instance is frozen.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(PerfMgmt.Enable.Threshold.GenericCounterInterface, ['template_name'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Enable.Threshold.GenericCounterInterface']['meta_info']
class CpuNode(_Entity_):
"""
Threshold monitoring for CPU
.. attribute:: nodes
Node specification
**type**\: :py:class:`Nodes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Threshold.CpuNode.Nodes>`
.. attribute:: node_all
All the the nodes
**type**\: :py:class:`NodeAll <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Threshold.CpuNode.NodeAll>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PerfMgmt.Enable.Threshold.CpuNode, self).__init__()
self.yang_name = "cpu-node"
self.yang_parent_name = "threshold"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("nodes", ("nodes", PerfMgmt.Enable.Threshold.CpuNode.Nodes)), ("node-all", ("node_all", PerfMgmt.Enable.Threshold.CpuNode.NodeAll))])
self._leafs = OrderedDict()
self.nodes = PerfMgmt.Enable.Threshold.CpuNode.Nodes()
self.nodes.parent = self
self._children_name_map["nodes"] = "nodes"
self.node_all = PerfMgmt.Enable.Threshold.CpuNode.NodeAll()
self.node_all.parent = self
self._children_name_map["node_all"] = "node-all"
self._segment_path = lambda: "cpu-node"
self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/threshold/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PerfMgmt.Enable.Threshold.CpuNode, [], name, value)
class Nodes(_Entity_):
"""
Node specification
.. attribute:: node
Node instance
**type**\: list of :py:class:`Node <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Threshold.CpuNode.Nodes.Node>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PerfMgmt.Enable.Threshold.CpuNode.Nodes, self).__init__()
self.yang_name = "nodes"
self.yang_parent_name = "cpu-node"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("node", ("node", PerfMgmt.Enable.Threshold.CpuNode.Nodes.Node))])
self._leafs = OrderedDict()
self.node = YList(self)
self._segment_path = lambda: "nodes"
self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/threshold/cpu-node/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PerfMgmt.Enable.Threshold.CpuNode.Nodes, [], name, value)
class Node(_Entity_):
"""
Node instance
.. attribute:: node_id (key)
Node ID
**type**\: str
**pattern:** ([a\-zA\-Z0\-9\_]\*\\d+/){1,2}([a\-zA\-Z0\-9\_]\*\\d+)
.. attribute:: template_name
Template name
**type**\: str
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PerfMgmt.Enable.Threshold.CpuNode.Nodes.Node, self).__init__()
self.yang_name = "node"
self.yang_parent_name = "nodes"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['node_id']
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('node_id', (YLeaf(YType.str, 'node-id'), ['str'])),
('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
])
self.node_id = None
self.template_name = None
self._segment_path = lambda: "node" + "[node-id='" + str(self.node_id) + "']"
self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/threshold/cpu-node/nodes/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PerfMgmt.Enable.Threshold.CpuNode.Nodes.Node, ['node_id', 'template_name'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
return meta._meta_table['PerfMgmt.Enable.Threshold.CpuNode.Nodes.Node']['meta_info']
@staticmethod
def _meta_info():
    """Fetch the generated meta details for the CpuNode.Nodes container."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as perfmgmt_meta
    entry = perfmgmt_meta._meta_table['PerfMgmt.Enable.Threshold.CpuNode.Nodes']
    return entry['meta_info']
class NodeAll(_Entity_):
    """
    All the nodes
    .. attribute:: template_name
    	Template name
    	**type**\: str
    """
    # YANG model prefix and revision this binding was generated from.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'
    def __init__(self):
        # Initialize the _Entity_ base in a Python 2/3 compatible way.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Enable.Threshold.CpuNode.NodeAll, self).__init__()
        self.yang_name = "node-all"
        self.yang_parent_name = "cpu-node"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Single leaf: name of the threshold template applied to all nodes.
        self._leafs = OrderedDict([
            ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
        ])
        self.template_name = None
        self._segment_path = lambda: "node-all"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/threshold/cpu-node/%s" % self._segment_path()
        # Construction complete; later writes are validated via __setattr__ below.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Delegate to _Entity_ validation with this class's leaf names.
        self._perform_setattr(PerfMgmt.Enable.Threshold.CpuNode.NodeAll, ['template_name'], name, value)
    @staticmethod
    def _meta_info():
        """Return the generated meta information for this class."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Enable.Threshold.CpuNode.NodeAll']['meta_info']
@staticmethod
def _meta_info():
    """Fetch the generated meta details for the Threshold.CpuNode container."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as perfmgmt_meta
    entry = perfmgmt_meta._meta_table['PerfMgmt.Enable.Threshold.CpuNode']
    return entry['meta_info']
class LdpMpls(_Entity_):
    """
    Threshold monitoring for LDP
    .. attribute:: template_name
    	Template name
    	**type**\: str
    """
    # YANG model prefix and revision this binding was generated from.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'
    def __init__(self):
        # Initialize the _Entity_ base in a Python 2/3 compatible way.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Enable.Threshold.LdpMpls, self).__init__()
        self.yang_name = "ldp-mpls"
        self.yang_parent_name = "threshold"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Single leaf: name of the threshold template for LDP monitoring.
        self._leafs = OrderedDict([
            ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
        ])
        self.template_name = None
        self._segment_path = lambda: "ldp-mpls"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/threshold/%s" % self._segment_path()
        # Construction complete; later writes are validated via __setattr__ below.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Delegate to _Entity_ validation with this class's leaf names.
        self._perform_setattr(PerfMgmt.Enable.Threshold.LdpMpls, ['template_name'], name, value)
    @staticmethod
    def _meta_info():
        """Return the generated meta information for this class."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Enable.Threshold.LdpMpls']['meta_info']
class ProcessNode(_Entity_):
    """
    Threshold monitoring for process
    .. attribute:: nodes
    	Node specification
    	**type**\: :py:class:`Nodes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Threshold.ProcessNode.Nodes>`
    .. attribute:: node_all
    	All the nodes
    	**type**\: :py:class:`NodeAll <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Threshold.ProcessNode.NodeAll>`
    """
    # YANG model prefix and revision this binding was generated from.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'
    def __init__(self):
        # Initialize the _Entity_ base in a Python 2/3 compatible way.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Enable.Threshold.ProcessNode, self).__init__()
        self.yang_name = "process-node"
        self.yang_parent_name = "threshold"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # Child containers: YANG name -> (python attribute, binding class).
        self._child_classes = OrderedDict([("nodes", ("nodes", PerfMgmt.Enable.Threshold.ProcessNode.Nodes)), ("node-all", ("node_all", PerfMgmt.Enable.Threshold.ProcessNode.NodeAll))])
        self._leafs = OrderedDict()
        # Instantiate child containers and wire their parent links.
        self.nodes = PerfMgmt.Enable.Threshold.ProcessNode.Nodes()
        self.nodes.parent = self
        self._children_name_map["nodes"] = "nodes"
        self.node_all = PerfMgmt.Enable.Threshold.ProcessNode.NodeAll()
        self.node_all.parent = self
        self._children_name_map["node_all"] = "node-all"
        self._segment_path = lambda: "process-node"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/threshold/%s" % self._segment_path()
        # Construction complete; later writes are validated via __setattr__ below.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Delegate to _Entity_ validation; this container has no writable leafs.
        self._perform_setattr(PerfMgmt.Enable.Threshold.ProcessNode, [], name, value)
    class Nodes(_Entity_):
        """
        Node specification
        .. attribute:: node
        	Node instance
        	**type**\: list of :py:class:`Node <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Threshold.ProcessNode.Nodes.Node>`
        """
        # YANG model prefix and revision this binding was generated from.
        _prefix = 'manageability-perfmgmt-cfg'
        _revision = '2017-09-07'
        def __init__(self):
            # Initialize the _Entity_ base in a Python 2/3 compatible way.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PerfMgmt.Enable.Threshold.ProcessNode.Nodes, self).__init__()
            self.yang_name = "nodes"
            self.yang_parent_name = "process-node"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            # One child list class, keyed entries held in self.node below.
            self._child_classes = OrderedDict([("node", ("node", PerfMgmt.Enable.Threshold.ProcessNode.Nodes.Node))])
            self._leafs = OrderedDict()
            # YList of Node entries owned by this container.
            self.node = YList(self)
            self._segment_path = lambda: "nodes"
            self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/threshold/process-node/%s" % self._segment_path()
            # Construction complete; later writes are validated via __setattr__ below.
            self._is_frozen = True
        def __setattr__(self, name, value):
            # Delegate to _Entity_ validation; this container has no writable leafs.
            self._perform_setattr(PerfMgmt.Enable.Threshold.ProcessNode.Nodes, [], name, value)
        class Node(_Entity_):
            """
            Node instance
            .. attribute:: node_id  (key)
            	Node ID
            	**type**\: str
            	**pattern:** ([a\-zA\-Z0\-9\_]\*\\d+/){1,2}([a\-zA\-Z0\-9\_]\*\\d+)
            .. attribute:: template_name
            	Template name
            	**type**\: str
            """
            # YANG model prefix and revision this binding was generated from.
            _prefix = 'manageability-perfmgmt-cfg'
            _revision = '2017-09-07'
            def __init__(self):
                # Initialize the _Entity_ base in a Python 2/3 compatible way.
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(PerfMgmt.Enable.Threshold.ProcessNode.Nodes.Node, self).__init__()
                self.yang_name = "node"
                self.yang_parent_name = "nodes"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                # 'node_id' is the YANG list key for this entry.
                self.ylist_key_names = ['node_id']
                self._child_classes = OrderedDict([])
                # Leaf descriptors: python attribute -> (YLeaf, accepted type names).
                self._leafs = OrderedDict([
                    ('node_id', (YLeaf(YType.str, 'node-id'), ['str'])),
                    ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
                ])
                self.node_id = None
                self.template_name = None
                # The path segment embeds the list-key predicate.
                self._segment_path = lambda: "node" + "[node-id='" + str(self.node_id) + "']"
                self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/threshold/process-node/nodes/%s" % self._segment_path()
                # Construction complete; later writes are validated via __setattr__ below.
                self._is_frozen = True
            def __setattr__(self, name, value):
                # Delegate to _Entity_ validation with this class's leaf names.
                self._perform_setattr(PerfMgmt.Enable.Threshold.ProcessNode.Nodes.Node, ['node_id', 'template_name'], name, value)
            @staticmethod
            def _meta_info():
                """Return the generated meta information for this class."""
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
                return meta._meta_table['PerfMgmt.Enable.Threshold.ProcessNode.Nodes.Node']['meta_info']
        @staticmethod
        def _meta_info():
            """Return the generated meta information for this class."""
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
            return meta._meta_table['PerfMgmt.Enable.Threshold.ProcessNode.Nodes']['meta_info']
    class NodeAll(_Entity_):
        """
        All the nodes
        .. attribute:: template_name
        	Template name
        	**type**\: str
        """
        # YANG model prefix and revision this binding was generated from.
        _prefix = 'manageability-perfmgmt-cfg'
        _revision = '2017-09-07'
        def __init__(self):
            # Initialize the _Entity_ base in a Python 2/3 compatible way.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PerfMgmt.Enable.Threshold.ProcessNode.NodeAll, self).__init__()
            self.yang_name = "node-all"
            self.yang_parent_name = "process-node"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            # Single leaf: name of the threshold template applied to all nodes.
            self._leafs = OrderedDict([
                ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
            ])
            self.template_name = None
            self._segment_path = lambda: "node-all"
            self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/threshold/process-node/%s" % self._segment_path()
            # Construction complete; later writes are validated via __setattr__ below.
            self._is_frozen = True
        def __setattr__(self, name, value):
            # Delegate to _Entity_ validation with this class's leaf names.
            self._perform_setattr(PerfMgmt.Enable.Threshold.ProcessNode.NodeAll, ['template_name'], name, value)
        @staticmethod
        def _meta_info():
            """Return the generated meta information for this class."""
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
            return meta._meta_table['PerfMgmt.Enable.Threshold.ProcessNode.NodeAll']['meta_info']
    @staticmethod
    def _meta_info():
        """Return the generated meta information for this class."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Enable.Threshold.ProcessNode']['meta_info']
class BasicCounterInterface(_Entity_):
    """
    Threshold monitoring for Interface
    basic\-counters
    .. attribute:: template_name
    	Template name
    	**type**\: str
    """
    # YANG model prefix and revision this binding was generated from.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'
    def __init__(self):
        # Initialize the _Entity_ base in a Python 2/3 compatible way.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Enable.Threshold.BasicCounterInterface, self).__init__()
        self.yang_name = "basic-counter-interface"
        self.yang_parent_name = "threshold"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Single leaf: name of the threshold template for basic counters.
        self._leafs = OrderedDict([
            ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
        ])
        self.template_name = None
        self._segment_path = lambda: "basic-counter-interface"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/threshold/%s" % self._segment_path()
        # Construction complete; later writes are validated via __setattr__ below.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Delegate to _Entity_ validation with this class's leaf names.
        self._perform_setattr(PerfMgmt.Enable.Threshold.BasicCounterInterface, ['template_name'], name, value)
    @staticmethod
    def _meta_info():
        """Return the generated meta information for this class."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Enable.Threshold.BasicCounterInterface']['meta_info']
@staticmethod
def _meta_info():
    """Fetch the generated meta details for the Threshold container."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as perfmgmt_meta
    entry = perfmgmt_meta._meta_table['PerfMgmt.Enable.Threshold']
    return entry['meta_info']
class Statistics(_Entity_):
"""
Start periodic collection using a defined a
template
.. attribute:: generic_counter_interface
Statistics collection for generic\-counters
**type**\: :py:class:`GenericCounterInterface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Statistics.GenericCounterInterface>`
.. attribute:: bgp
Data collection for BGP
**type**\: :py:class:`Bgp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Statistics.Bgp>`
.. attribute:: ospfv2_protocol
Data collection for OSPF v2 Protocol
**type**\: :py:class:`Ospfv2Protocol <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Statistics.Ospfv2Protocol>`
.. attribute:: ospfv3_protocol
Data collection for OSPF v3 Protocol
**type**\: :py:class:`Ospfv3Protocol <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Statistics.Ospfv3Protocol>`
.. attribute:: cpu_node
Collection for CPU
**type**\: :py:class:`CpuNode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Statistics.CpuNode>`
.. attribute:: basic_counter_interface
Statistics collection for basic\-counters
**type**\: :py:class:`BasicCounterInterface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Statistics.BasicCounterInterface>`
.. attribute:: process_node
Collection for process
**type**\: :py:class:`ProcessNode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Statistics.ProcessNode>`
.. attribute:: data_rate_interface
Statistics collection for data\-rates
**type**\: :py:class:`DataRateInterface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Statistics.DataRateInterface>`
.. attribute:: memory_node
Collection for memory
**type**\: :py:class:`MemoryNode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Statistics.MemoryNode>`
.. attribute:: ldp_mpls
Collection for labels distribution protocol
**type**\: :py:class:`LdpMpls <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Statistics.LdpMpls>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
    # Initialize the _Entity_ base in a Python 2/3 compatible way.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PerfMgmt.Enable.Statistics, self).__init__()
    self.yang_name = "statistics"
    self.yang_parent_name = "enable"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # Child containers: YANG name -> (python attribute, binding class).
    self._child_classes = OrderedDict([("generic-counter-interface", ("generic_counter_interface", PerfMgmt.Enable.Statistics.GenericCounterInterface)), ("bgp", ("bgp", PerfMgmt.Enable.Statistics.Bgp)), ("ospfv2-protocol", ("ospfv2_protocol", PerfMgmt.Enable.Statistics.Ospfv2Protocol)), ("ospfv3-protocol", ("ospfv3_protocol", PerfMgmt.Enable.Statistics.Ospfv3Protocol)), ("cpu-node", ("cpu_node", PerfMgmt.Enable.Statistics.CpuNode)), ("basic-counter-interface", ("basic_counter_interface", PerfMgmt.Enable.Statistics.BasicCounterInterface)), ("process-node", ("process_node", PerfMgmt.Enable.Statistics.ProcessNode)), ("data-rate-interface", ("data_rate_interface", PerfMgmt.Enable.Statistics.DataRateInterface)), ("memory-node", ("memory_node", PerfMgmt.Enable.Statistics.MemoryNode)), ("ldp-mpls", ("ldp_mpls", PerfMgmt.Enable.Statistics.LdpMpls))])
    self._leafs = OrderedDict()
    # Instantiate each child container and wire its parent link.
    self.generic_counter_interface = PerfMgmt.Enable.Statistics.GenericCounterInterface()
    self.generic_counter_interface.parent = self
    self._children_name_map["generic_counter_interface"] = "generic-counter-interface"
    self.bgp = PerfMgmt.Enable.Statistics.Bgp()
    self.bgp.parent = self
    self._children_name_map["bgp"] = "bgp"
    self.ospfv2_protocol = PerfMgmt.Enable.Statistics.Ospfv2Protocol()
    self.ospfv2_protocol.parent = self
    self._children_name_map["ospfv2_protocol"] = "ospfv2-protocol"
    self.ospfv3_protocol = PerfMgmt.Enable.Statistics.Ospfv3Protocol()
    self.ospfv3_protocol.parent = self
    self._children_name_map["ospfv3_protocol"] = "ospfv3-protocol"
    self.cpu_node = PerfMgmt.Enable.Statistics.CpuNode()
    self.cpu_node.parent = self
    self._children_name_map["cpu_node"] = "cpu-node"
    self.basic_counter_interface = PerfMgmt.Enable.Statistics.BasicCounterInterface()
    self.basic_counter_interface.parent = self
    self._children_name_map["basic_counter_interface"] = "basic-counter-interface"
    self.process_node = PerfMgmt.Enable.Statistics.ProcessNode()
    self.process_node.parent = self
    self._children_name_map["process_node"] = "process-node"
    self.data_rate_interface = PerfMgmt.Enable.Statistics.DataRateInterface()
    self.data_rate_interface.parent = self
    self._children_name_map["data_rate_interface"] = "data-rate-interface"
    self.memory_node = PerfMgmt.Enable.Statistics.MemoryNode()
    self.memory_node.parent = self
    self._children_name_map["memory_node"] = "memory-node"
    self.ldp_mpls = PerfMgmt.Enable.Statistics.LdpMpls()
    self.ldp_mpls.parent = self
    self._children_name_map["ldp_mpls"] = "ldp-mpls"
    self._segment_path = lambda: "statistics"
    self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/%s" % self._segment_path()
    # Construction complete; later writes are validated via __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Route every attribute write through _Entity_ validation; this
    # container has no writable leafs of its own (empty list).
    self._perform_setattr(PerfMgmt.Enable.Statistics, [], name, value)
class GenericCounterInterface(_Entity_):
    """
    Statistics collection for generic\-counters
    .. attribute:: template_name
    	Template name
    	**type**\: str
    """
    # YANG model prefix and revision this binding was generated from.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'
    def __init__(self):
        # Initialize the _Entity_ base in a Python 2/3 compatible way.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Enable.Statistics.GenericCounterInterface, self).__init__()
        self.yang_name = "generic-counter-interface"
        self.yang_parent_name = "statistics"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Single leaf: name of the collection template to use.
        self._leafs = OrderedDict([
            ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
        ])
        self.template_name = None
        self._segment_path = lambda: "generic-counter-interface"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/statistics/%s" % self._segment_path()
        # Construction complete; later writes are validated via __setattr__ below.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Delegate to _Entity_ validation with this class's leaf names.
        self._perform_setattr(PerfMgmt.Enable.Statistics.GenericCounterInterface, ['template_name'], name, value)
    @staticmethod
    def _meta_info():
        """Return the generated meta information for this class."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Enable.Statistics.GenericCounterInterface']['meta_info']
class Bgp(_Entity_):
    """
    Data collection for BGP
    .. attribute:: template_name
    	Template name
    	**type**\: str
    """
    # YANG model prefix and revision this binding was generated from.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'
    def __init__(self):
        # Initialize the _Entity_ base in a Python 2/3 compatible way.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Enable.Statistics.Bgp, self).__init__()
        self.yang_name = "bgp"
        self.yang_parent_name = "statistics"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Single leaf: name of the collection template to use.
        self._leafs = OrderedDict([
            ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
        ])
        self.template_name = None
        self._segment_path = lambda: "bgp"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/statistics/%s" % self._segment_path()
        # Construction complete; later writes are validated via __setattr__ below.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Delegate to _Entity_ validation with this class's leaf names.
        self._perform_setattr(PerfMgmt.Enable.Statistics.Bgp, ['template_name'], name, value)
    @staticmethod
    def _meta_info():
        """Return the generated meta information for this class."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Enable.Statistics.Bgp']['meta_info']
class Ospfv2Protocol(_Entity_):
    """
    Data collection for OSPF v2 Protocol
    .. attribute:: template_name
    	Template name
    	**type**\: str
    """
    # YANG model prefix and revision this binding was generated from.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'
    def __init__(self):
        # Initialize the _Entity_ base in a Python 2/3 compatible way.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Enable.Statistics.Ospfv2Protocol, self).__init__()
        self.yang_name = "ospfv2-protocol"
        self.yang_parent_name = "statistics"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Single leaf: name of the collection template to use.
        self._leafs = OrderedDict([
            ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
        ])
        self.template_name = None
        self._segment_path = lambda: "ospfv2-protocol"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/statistics/%s" % self._segment_path()
        # Construction complete; later writes are validated via __setattr__ below.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Delegate to _Entity_ validation with this class's leaf names.
        self._perform_setattr(PerfMgmt.Enable.Statistics.Ospfv2Protocol, ['template_name'], name, value)
    @staticmethod
    def _meta_info():
        """Return the generated meta information for this class."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Enable.Statistics.Ospfv2Protocol']['meta_info']
class Ospfv3Protocol(_Entity_):
    """
    Data collection for OSPF v3 Protocol
    .. attribute:: template_name
    	Template name
    	**type**\: str
    """
    # YANG model prefix and revision this binding was generated from.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'
    def __init__(self):
        # Initialize the _Entity_ base in a Python 2/3 compatible way.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Enable.Statistics.Ospfv3Protocol, self).__init__()
        self.yang_name = "ospfv3-protocol"
        self.yang_parent_name = "statistics"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Single leaf: name of the collection template to use.
        self._leafs = OrderedDict([
            ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
        ])
        self.template_name = None
        self._segment_path = lambda: "ospfv3-protocol"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/statistics/%s" % self._segment_path()
        # Construction complete; later writes are validated via __setattr__ below.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Delegate to _Entity_ validation with this class's leaf names.
        self._perform_setattr(PerfMgmt.Enable.Statistics.Ospfv3Protocol, ['template_name'], name, value)
    @staticmethod
    def _meta_info():
        """Return the generated meta information for this class."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Enable.Statistics.Ospfv3Protocol']['meta_info']
class CpuNode(_Entity_):
    """
    Collection for CPU
    .. attribute:: node_all
    	All the nodes
    	**type**\: :py:class:`NodeAll <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Statistics.CpuNode.NodeAll>`
    .. attribute:: nodes
    	Node specification
    	**type**\: :py:class:`Nodes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Statistics.CpuNode.Nodes>`
    """
    # YANG model prefix and revision this binding was generated from.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'
    def __init__(self):
        # Initialize the _Entity_ base in a Python 2/3 compatible way.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Enable.Statistics.CpuNode, self).__init__()
        self.yang_name = "cpu-node"
        self.yang_parent_name = "statistics"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # Child containers: YANG name -> (python attribute, binding class).
        self._child_classes = OrderedDict([("node-all", ("node_all", PerfMgmt.Enable.Statistics.CpuNode.NodeAll)), ("nodes", ("nodes", PerfMgmt.Enable.Statistics.CpuNode.Nodes))])
        self._leafs = OrderedDict()
        # Instantiate child containers and wire their parent links.
        self.node_all = PerfMgmt.Enable.Statistics.CpuNode.NodeAll()
        self.node_all.parent = self
        self._children_name_map["node_all"] = "node-all"
        self.nodes = PerfMgmt.Enable.Statistics.CpuNode.Nodes()
        self.nodes.parent = self
        self._children_name_map["nodes"] = "nodes"
        self._segment_path = lambda: "cpu-node"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/statistics/%s" % self._segment_path()
        # Construction complete; later writes are validated via __setattr__ below.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Delegate to _Entity_ validation; this container has no writable leafs.
        self._perform_setattr(PerfMgmt.Enable.Statistics.CpuNode, [], name, value)
    class NodeAll(_Entity_):
        """
        All the nodes
        .. attribute:: template_name
        	Template name
        	**type**\: str
        """
        # YANG model prefix and revision this binding was generated from.
        _prefix = 'manageability-perfmgmt-cfg'
        _revision = '2017-09-07'
        def __init__(self):
            # Initialize the _Entity_ base in a Python 2/3 compatible way.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PerfMgmt.Enable.Statistics.CpuNode.NodeAll, self).__init__()
            self.yang_name = "node-all"
            self.yang_parent_name = "cpu-node"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            # Single leaf: name of the collection template applied to all nodes.
            self._leafs = OrderedDict([
                ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
            ])
            self.template_name = None
            self._segment_path = lambda: "node-all"
            self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/statistics/cpu-node/%s" % self._segment_path()
            # Construction complete; later writes are validated via __setattr__ below.
            self._is_frozen = True
        def __setattr__(self, name, value):
            # Delegate to _Entity_ validation with this class's leaf names.
            self._perform_setattr(PerfMgmt.Enable.Statistics.CpuNode.NodeAll, ['template_name'], name, value)
        @staticmethod
        def _meta_info():
            """Return the generated meta information for this class."""
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
            return meta._meta_table['PerfMgmt.Enable.Statistics.CpuNode.NodeAll']['meta_info']
    class Nodes(_Entity_):
        """
        Node specification
        .. attribute:: node
        	Node instance
        	**type**\: list of :py:class:`Node <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Statistics.CpuNode.Nodes.Node>`
        """
        # YANG model prefix and revision this binding was generated from.
        _prefix = 'manageability-perfmgmt-cfg'
        _revision = '2017-09-07'
        def __init__(self):
            # Initialize the _Entity_ base in a Python 2/3 compatible way.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PerfMgmt.Enable.Statistics.CpuNode.Nodes, self).__init__()
            self.yang_name = "nodes"
            self.yang_parent_name = "cpu-node"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            # One child list class, keyed entries held in self.node below.
            self._child_classes = OrderedDict([("node", ("node", PerfMgmt.Enable.Statistics.CpuNode.Nodes.Node))])
            self._leafs = OrderedDict()
            # YList of Node entries owned by this container.
            self.node = YList(self)
            self._segment_path = lambda: "nodes"
            self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/statistics/cpu-node/%s" % self._segment_path()
            # Construction complete; later writes are validated via __setattr__ below.
            self._is_frozen = True
        def __setattr__(self, name, value):
            # Delegate to _Entity_ validation; this container has no writable leafs.
            self._perform_setattr(PerfMgmt.Enable.Statistics.CpuNode.Nodes, [], name, value)
        class Node(_Entity_):
            """
            Node instance
            .. attribute:: node_id  (key)
            	Node ID
            	**type**\: str
            	**pattern:** ([a\-zA\-Z0\-9\_]\*\\d+/){1,2}([a\-zA\-Z0\-9\_]\*\\d+)
            .. attribute:: template_name
            	Template name
            	**type**\: str
            """
            # YANG model prefix and revision this binding was generated from.
            _prefix = 'manageability-perfmgmt-cfg'
            _revision = '2017-09-07'
            def __init__(self):
                # Initialize the _Entity_ base in a Python 2/3 compatible way.
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(PerfMgmt.Enable.Statistics.CpuNode.Nodes.Node, self).__init__()
                self.yang_name = "node"
                self.yang_parent_name = "nodes"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                # 'node_id' is the YANG list key for this entry.
                self.ylist_key_names = ['node_id']
                self._child_classes = OrderedDict([])
                # Leaf descriptors: python attribute -> (YLeaf, accepted type names).
                self._leafs = OrderedDict([
                    ('node_id', (YLeaf(YType.str, 'node-id'), ['str'])),
                    ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
                ])
                self.node_id = None
                self.template_name = None
                # The path segment embeds the list-key predicate.
                self._segment_path = lambda: "node" + "[node-id='" + str(self.node_id) + "']"
                self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/statistics/cpu-node/nodes/%s" % self._segment_path()
                # Construction complete; later writes are validated via __setattr__ below.
                self._is_frozen = True
            def __setattr__(self, name, value):
                # Delegate to _Entity_ validation with this class's leaf names.
                self._perform_setattr(PerfMgmt.Enable.Statistics.CpuNode.Nodes.Node, ['node_id', 'template_name'], name, value)
            @staticmethod
            def _meta_info():
                """Return the generated meta information for this class."""
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
                return meta._meta_table['PerfMgmt.Enable.Statistics.CpuNode.Nodes.Node']['meta_info']
        @staticmethod
        def _meta_info():
            """Return the generated meta information for this class."""
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
            return meta._meta_table['PerfMgmt.Enable.Statistics.CpuNode.Nodes']['meta_info']
    @staticmethod
    def _meta_info():
        """Return the generated meta information for this class."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Enable.Statistics.CpuNode']['meta_info']
class BasicCounterInterface(_Entity_):
    """
    Statistics collection for basic\-counters
    .. attribute:: template_name
    	Template name
    	**type**\: str
    """
    # YANG model prefix and revision this binding was generated from.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'
    def __init__(self):
        # Initialize the _Entity_ base in a Python 2/3 compatible way.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Enable.Statistics.BasicCounterInterface, self).__init__()
        self.yang_name = "basic-counter-interface"
        self.yang_parent_name = "statistics"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Single leaf: name of the collection template to use.
        self._leafs = OrderedDict([
            ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
        ])
        self.template_name = None
        self._segment_path = lambda: "basic-counter-interface"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/statistics/%s" % self._segment_path()
        # Construction complete; later writes are validated via __setattr__ below.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Delegate to _Entity_ validation with this class's leaf names.
        self._perform_setattr(PerfMgmt.Enable.Statistics.BasicCounterInterface, ['template_name'], name, value)
    @staticmethod
    def _meta_info():
        """Return the generated meta information for this class."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Enable.Statistics.BasicCounterInterface']['meta_info']
class ProcessNode(_Entity_):
"""
Collection for process
.. attribute:: node_all
All the the nodes
**type**\: :py:class:`NodeAll <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Statistics.ProcessNode.NodeAll>`
.. attribute:: nodes
Node specification
**type**\: :py:class:`Nodes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Statistics.ProcessNode.Nodes>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PerfMgmt.Enable.Statistics.ProcessNode, self).__init__()
self.yang_name = "process-node"
self.yang_parent_name = "statistics"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("node-all", ("node_all", PerfMgmt.Enable.Statistics.ProcessNode.NodeAll)), ("nodes", ("nodes", PerfMgmt.Enable.Statistics.ProcessNode.Nodes))])
self._leafs = OrderedDict()
self.node_all = PerfMgmt.Enable.Statistics.ProcessNode.NodeAll()
self.node_all.parent = self
self._children_name_map["node_all"] = "node-all"
self.nodes = PerfMgmt.Enable.Statistics.ProcessNode.Nodes()
self.nodes.parent = self
self._children_name_map["nodes"] = "nodes"
self._segment_path = lambda: "process-node"
self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/statistics/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PerfMgmt.Enable.Statistics.ProcessNode, [], name, value)
class NodeAll(_Entity_):
"""
All the the nodes
.. attribute:: template_name
Template name
**type**\: str
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PerfMgmt.Enable.Statistics.ProcessNode.NodeAll, self).__init__()
self.yang_name = "node-all"
self.yang_parent_name = "process-node"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
])
self.template_name = None
self._segment_path = lambda: "node-all"
self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/statistics/process-node/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PerfMgmt.Enable.Statistics.ProcessNode.NodeAll, ['template_name'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
return meta._meta_table['PerfMgmt.Enable.Statistics.ProcessNode.NodeAll']['meta_info']
class Nodes(_Entity_):
"""
Node specification
.. attribute:: node
Node instance
**type**\: list of :py:class:`Node <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Statistics.ProcessNode.Nodes.Node>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PerfMgmt.Enable.Statistics.ProcessNode.Nodes, self).__init__()
self.yang_name = "nodes"
self.yang_parent_name = "process-node"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("node", ("node", PerfMgmt.Enable.Statistics.ProcessNode.Nodes.Node))])
self._leafs = OrderedDict()
self.node = YList(self)
self._segment_path = lambda: "nodes"
self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/statistics/process-node/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PerfMgmt.Enable.Statistics.ProcessNode.Nodes, [], name, value)
class Node(_Entity_):
"""
Node instance
.. attribute:: node_id (key)
Node ID
**type**\: str
**pattern:** ([a\-zA\-Z0\-9\_]\*\\d+/){1,2}([a\-zA\-Z0\-9\_]\*\\d+)
.. attribute:: template_name
Template name
**type**\: str
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PerfMgmt.Enable.Statistics.ProcessNode.Nodes.Node, self).__init__()
self.yang_name = "node"
self.yang_parent_name = "nodes"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['node_id']
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('node_id', (YLeaf(YType.str, 'node-id'), ['str'])),
('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
])
self.node_id = None
self.template_name = None
self._segment_path = lambda: "node" + "[node-id='" + str(self.node_id) + "']"
self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/statistics/process-node/nodes/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PerfMgmt.Enable.Statistics.ProcessNode.Nodes.Node, ['node_id', 'template_name'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
return meta._meta_table['PerfMgmt.Enable.Statistics.ProcessNode.Nodes.Node']['meta_info']
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
return meta._meta_table['PerfMgmt.Enable.Statistics.ProcessNode.Nodes']['meta_info']
@staticmethod
def _meta_info():
    # Lazy import keeps the large meta table out of module import time.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
    return meta._meta_table['PerfMgmt.Enable.Statistics.ProcessNode']['meta_info']
class DataRateInterface(_Entity_):
    """
    Statistics collection for generic\-counters

    .. attribute:: template_name

        Template name

        **type**\: str
    """

    # YANG module identity this generated binding was produced from.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Zero-arg super() on Python 3; Python 2 needs the explicit form.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Enable.Statistics.DataRateInterface, self).__init__()

        self.yang_name = "data-rate-interface"
        self.yang_parent_name = "statistics"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Single string leaf: the collection template name.
        self._leafs = OrderedDict([
            ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
        ])
        self.template_name = None

        self._segment_path = lambda: "data-rate-interface"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/statistics/%s" % self._segment_path()
        # Must be set last: once frozen, __setattr__ validates all assignments.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route through YDK's validator; only 'template_name' is a settable leaf.
        self._perform_setattr(PerfMgmt.Enable.Statistics.DataRateInterface, ['template_name'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import keeps the large meta table out of module import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Enable.Statistics.DataRateInterface']['meta_info']
class MemoryNode(_Entity_):
    """
    Collection for memory

    .. attribute:: node_all

        All the the nodes

        **type**\: :py:class:`NodeAll <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Statistics.MemoryNode.NodeAll>`

    .. attribute:: nodes

        Node specification

        **type**\: :py:class:`Nodes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Statistics.MemoryNode.Nodes>`
    """

    # YANG module identity this generated binding was produced from.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Zero-arg super() on Python 3; Python 2 needs the explicit form.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Enable.Statistics.MemoryNode, self).__init__()

        self.yang_name = "memory-node"
        self.yang_parent_name = "statistics"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # Maps YANG container names to (python attribute, child class).
        self._child_classes = OrderedDict([("node-all", ("node_all", PerfMgmt.Enable.Statistics.MemoryNode.NodeAll)), ("nodes", ("nodes", PerfMgmt.Enable.Statistics.MemoryNode.Nodes))])
        self._leafs = OrderedDict()

        # Eagerly instantiate the two child containers and link parents.
        self.node_all = PerfMgmt.Enable.Statistics.MemoryNode.NodeAll()
        self.node_all.parent = self
        self._children_name_map["node_all"] = "node-all"

        self.nodes = PerfMgmt.Enable.Statistics.MemoryNode.Nodes()
        self.nodes.parent = self
        self._children_name_map["nodes"] = "nodes"

        self._segment_path = lambda: "memory-node"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/statistics/%s" % self._segment_path()
        # Must be set last: once frozen, __setattr__ validates all assignments.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route through YDK's validator; this container has no data leafs.
        self._perform_setattr(PerfMgmt.Enable.Statistics.MemoryNode, [], name, value)

    class NodeAll(_Entity_):
        """
        All the the nodes

        .. attribute:: template_name

            Template name

            **type**\: str
        """

        _prefix = 'manageability-perfmgmt-cfg'
        _revision = '2017-09-07'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PerfMgmt.Enable.Statistics.MemoryNode.NodeAll, self).__init__()

            self.yang_name = "node-all"
            self.yang_parent_name = "memory-node"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            # Single string leaf: the collection template name.
            self._leafs = OrderedDict([
                ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
            ])
            self.template_name = None

            self._segment_path = lambda: "node-all"
            self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/statistics/memory-node/%s" % self._segment_path()
            # Freeze last so __setattr__ validation applies afterwards.
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(PerfMgmt.Enable.Statistics.MemoryNode.NodeAll, ['template_name'], name, value)

        @staticmethod
        def _meta_info():
            # Lazy import keeps the large meta table out of module import time.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
            return meta._meta_table['PerfMgmt.Enable.Statistics.MemoryNode.NodeAll']['meta_info']

    class Nodes(_Entity_):
        """
        Node specification

        .. attribute:: node

            Node instance

            **type**\: list of :py:class:`Node <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.Statistics.MemoryNode.Nodes.Node>`
        """

        _prefix = 'manageability-perfmgmt-cfg'
        _revision = '2017-09-07'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PerfMgmt.Enable.Statistics.MemoryNode.Nodes, self).__init__()

            self.yang_name = "nodes"
            self.yang_parent_name = "memory-node"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("node", ("node", PerfMgmt.Enable.Statistics.MemoryNode.Nodes.Node))])
            self._leafs = OrderedDict()
            # YANG list: entries are Node instances keyed by node-id.
            self.node = YList(self)

            self._segment_path = lambda: "nodes"
            self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/statistics/memory-node/%s" % self._segment_path()
            # Freeze last so __setattr__ validation applies afterwards.
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(PerfMgmt.Enable.Statistics.MemoryNode.Nodes, [], name, value)

        class Node(_Entity_):
            """
            Node instance

            .. attribute:: node_id (key)

                Node ID

                **type**\: str

                **pattern:** ([a\-zA\-Z0\-9\_]\*\\d+/){1,2}([a\-zA\-Z0\-9\_]\*\\d+)

            .. attribute:: template_name

                Template name

                **type**\: str
            """

            _prefix = 'manageability-perfmgmt-cfg'
            _revision = '2017-09-07'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(PerfMgmt.Enable.Statistics.MemoryNode.Nodes.Node, self).__init__()

                self.yang_name = "node"
                self.yang_parent_name = "nodes"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                # 'node_id' is the YANG list key for this entry.
                self.ylist_key_names = ['node_id']
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('node_id', (YLeaf(YType.str, 'node-id'), ['str'])),
                    ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
                ])
                self.node_id = None
                self.template_name = None

                # Segment path carries the key predicate for addressability.
                self._segment_path = lambda: "node" + "[node-id='" + str(self.node_id) + "']"
                self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/statistics/memory-node/nodes/%s" % self._segment_path()
                # Freeze last so __setattr__ validation applies afterwards.
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(PerfMgmt.Enable.Statistics.MemoryNode.Nodes.Node, ['node_id', 'template_name'], name, value)

            @staticmethod
            def _meta_info():
                # Lazy import keeps the large meta table out of module import time.
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
                return meta._meta_table['PerfMgmt.Enable.Statistics.MemoryNode.Nodes.Node']['meta_info']

        @staticmethod
        def _meta_info():
            # Lazy import keeps the large meta table out of module import time.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
            return meta._meta_table['PerfMgmt.Enable.Statistics.MemoryNode.Nodes']['meta_info']

    @staticmethod
    def _meta_info():
        # Lazy import keeps the large meta table out of module import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Enable.Statistics.MemoryNode']['meta_info']
class LdpMpls(_Entity_):
    """
    Collection for labels distribution protocol

    .. attribute:: template_name

        Template name

        **type**\: str
    """

    # YANG module identity this generated binding was produced from.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Zero-arg super() on Python 3; Python 2 needs the explicit form.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Enable.Statistics.LdpMpls, self).__init__()

        self.yang_name = "ldp-mpls"
        self.yang_parent_name = "statistics"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Single string leaf: the collection template name.
        self._leafs = OrderedDict([
            ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
        ])
        self.template_name = None

        self._segment_path = lambda: "ldp-mpls"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/statistics/%s" % self._segment_path()
        # Must be set last: once frozen, __setattr__ validates all assignments.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route through YDK's validator; only 'template_name' is a settable leaf.
        self._perform_setattr(PerfMgmt.Enable.Statistics.LdpMpls, ['template_name'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import keeps the large meta table out of module import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Enable.Statistics.LdpMpls']['meta_info']
@staticmethod
def _meta_info():
    # Lazy import keeps the large meta table out of module import time.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
    return meta._meta_table['PerfMgmt.Enable.Statistics']['meta_info']
class MonitorEnable(_Entity_):
"""
Start data collection for a monitored instance
.. attribute:: ldp_mpls
Monitoring for LDP
**type**\: :py:class:`LdpMpls <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable.LdpMpls>`
.. attribute:: ospfv3_protocol
Monitor OSPF v3 Protocol
**type**\: :py:class:`Ospfv3Protocol <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable.Ospfv3Protocol>`
.. attribute:: generic_counters
Monitoring for generic\-counters
**type**\: :py:class:`GenericCounters <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable.GenericCounters>`
.. attribute:: process
Collection for a single process
**type**\: :py:class:`Process <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable.Process>`
.. attribute:: basic_counters
Monitoring for basic\-counters
**type**\: :py:class:`BasicCounters <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable.BasicCounters>`
.. attribute:: memory
Collection for memory
**type**\: :py:class:`Memory <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable.Memory>`
.. attribute:: ospfv2_protocol
Monitor OSPF v2 Protocol
**type**\: :py:class:`Ospfv2Protocol <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable.Ospfv2Protocol>`
.. attribute:: cpu
Collection for CPU
**type**\: :py:class:`Cpu <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable.Cpu>`
.. attribute:: bgp
Monitor BGP protocol
**type**\: :py:class:`Bgp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable.Bgp>`
.. attribute:: data_rates
Monitoring for data\-rates
**type**\: :py:class:`DataRates <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable.DataRates>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
    # Zero-arg super() on Python 3; Python 2 needs the explicit form.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PerfMgmt.Enable.MonitorEnable, self).__init__()

    self.yang_name = "monitor-enable"
    self.yang_parent_name = "enable"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # Maps each child YANG container name to (python attribute, child class).
    self._child_classes = OrderedDict([("ldp-mpls", ("ldp_mpls", PerfMgmt.Enable.MonitorEnable.LdpMpls)), ("ospfv3-protocol", ("ospfv3_protocol", PerfMgmt.Enable.MonitorEnable.Ospfv3Protocol)), ("generic-counters", ("generic_counters", PerfMgmt.Enable.MonitorEnable.GenericCounters)), ("process", ("process", PerfMgmt.Enable.MonitorEnable.Process)), ("basic-counters", ("basic_counters", PerfMgmt.Enable.MonitorEnable.BasicCounters)), ("memory", ("memory", PerfMgmt.Enable.MonitorEnable.Memory)), ("ospfv2-protocol", ("ospfv2_protocol", PerfMgmt.Enable.MonitorEnable.Ospfv2Protocol)), ("cpu", ("cpu", PerfMgmt.Enable.MonitorEnable.Cpu)), ("bgp", ("bgp", PerfMgmt.Enable.MonitorEnable.Bgp)), ("data-rates", ("data_rates", PerfMgmt.Enable.MonitorEnable.DataRates))])
    self._leafs = OrderedDict()

    # Eagerly instantiate every child container and link it back to self.
    self.ldp_mpls = PerfMgmt.Enable.MonitorEnable.LdpMpls()
    self.ldp_mpls.parent = self
    self._children_name_map["ldp_mpls"] = "ldp-mpls"

    self.ospfv3_protocol = PerfMgmt.Enable.MonitorEnable.Ospfv3Protocol()
    self.ospfv3_protocol.parent = self
    self._children_name_map["ospfv3_protocol"] = "ospfv3-protocol"

    self.generic_counters = PerfMgmt.Enable.MonitorEnable.GenericCounters()
    self.generic_counters.parent = self
    self._children_name_map["generic_counters"] = "generic-counters"

    self.process = PerfMgmt.Enable.MonitorEnable.Process()
    self.process.parent = self
    self._children_name_map["process"] = "process"

    self.basic_counters = PerfMgmt.Enable.MonitorEnable.BasicCounters()
    self.basic_counters.parent = self
    self._children_name_map["basic_counters"] = "basic-counters"

    self.memory = PerfMgmt.Enable.MonitorEnable.Memory()
    self.memory.parent = self
    self._children_name_map["memory"] = "memory"

    self.ospfv2_protocol = PerfMgmt.Enable.MonitorEnable.Ospfv2Protocol()
    self.ospfv2_protocol.parent = self
    self._children_name_map["ospfv2_protocol"] = "ospfv2-protocol"

    self.cpu = PerfMgmt.Enable.MonitorEnable.Cpu()
    self.cpu.parent = self
    self._children_name_map["cpu"] = "cpu"

    self.bgp = PerfMgmt.Enable.MonitorEnable.Bgp()
    self.bgp.parent = self
    self._children_name_map["bgp"] = "bgp"

    self.data_rates = PerfMgmt.Enable.MonitorEnable.DataRates()
    self.data_rates.parent = self
    self._children_name_map["data_rates"] = "data-rates"

    self._segment_path = lambda: "monitor-enable"
    self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/%s" % self._segment_path()
    # Must be set last: once frozen, __setattr__ validates all assignments.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Delegate to YDK's validating setter; this container has no data leafs.
    self._perform_setattr(PerfMgmt.Enable.MonitorEnable, [], name, value)
class LdpMpls(_Entity_):
    """
    Monitoring for LDP

    .. attribute:: sessions

        LDP session specification

        **type**\: :py:class:`Sessions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable.LdpMpls.Sessions>`
    """

    # YANG module identity this generated binding was produced from.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Zero-arg super() on Python 3; Python 2 needs the explicit form.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Enable.MonitorEnable.LdpMpls, self).__init__()

        self.yang_name = "ldp-mpls"
        self.yang_parent_name = "monitor-enable"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("sessions", ("sessions", PerfMgmt.Enable.MonitorEnable.LdpMpls.Sessions))])
        self._leafs = OrderedDict()

        # Eagerly instantiate the child container and link its parent.
        self.sessions = PerfMgmt.Enable.MonitorEnable.LdpMpls.Sessions()
        self.sessions.parent = self
        self._children_name_map["sessions"] = "sessions"

        self._segment_path = lambda: "ldp-mpls"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/monitor-enable/%s" % self._segment_path()
        # Must be set last: once frozen, __setattr__ validates all assignments.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(PerfMgmt.Enable.MonitorEnable.LdpMpls, [], name, value)

    class Sessions(_Entity_):
        """
        LDP session specification

        .. attribute:: session

            IP address of the LDP Session

            **type**\: list of :py:class:`Session <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable.LdpMpls.Sessions.Session>`
        """

        _prefix = 'manageability-perfmgmt-cfg'
        _revision = '2017-09-07'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PerfMgmt.Enable.MonitorEnable.LdpMpls.Sessions, self).__init__()

            self.yang_name = "sessions"
            self.yang_parent_name = "ldp-mpls"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("session", ("session", PerfMgmt.Enable.MonitorEnable.LdpMpls.Sessions.Session))])
            self._leafs = OrderedDict()
            # YANG list: entries are Session instances keyed by session address.
            self.session = YList(self)

            self._segment_path = lambda: "sessions"
            self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/monitor-enable/ldp-mpls/%s" % self._segment_path()
            # Freeze last so __setattr__ validation applies afterwards.
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(PerfMgmt.Enable.MonitorEnable.LdpMpls.Sessions, [], name, value)

        class Session(_Entity_):
            """
            IP address of the LDP Session

            .. attribute:: session (key)

                IP address of the LDP Session

                **type**\: str

                **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

            .. attribute:: template_name

                Template name

                **type**\: str
            """

            _prefix = 'manageability-perfmgmt-cfg'
            _revision = '2017-09-07'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(PerfMgmt.Enable.MonitorEnable.LdpMpls.Sessions.Session, self).__init__()

                self.yang_name = "session"
                self.yang_parent_name = "sessions"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                # 'session' (the peer address) is the YANG list key.
                self.ylist_key_names = ['session']
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('session', (YLeaf(YType.str, 'session'), ['str'])),
                    ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
                ])
                self.session = None
                self.template_name = None

                # Segment path carries the key predicate for addressability.
                self._segment_path = lambda: "session" + "[session='" + str(self.session) + "']"
                self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/monitor-enable/ldp-mpls/sessions/%s" % self._segment_path()
                # Freeze last so __setattr__ validation applies afterwards.
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(PerfMgmt.Enable.MonitorEnable.LdpMpls.Sessions.Session, ['session', 'template_name'], name, value)

            @staticmethod
            def _meta_info():
                # Lazy import keeps the large meta table out of module import time.
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
                return meta._meta_table['PerfMgmt.Enable.MonitorEnable.LdpMpls.Sessions.Session']['meta_info']

        @staticmethod
        def _meta_info():
            # Lazy import keeps the large meta table out of module import time.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
            return meta._meta_table['PerfMgmt.Enable.MonitorEnable.LdpMpls.Sessions']['meta_info']

    @staticmethod
    def _meta_info():
        # Lazy import keeps the large meta table out of module import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Enable.MonitorEnable.LdpMpls']['meta_info']
class Ospfv3Protocol(_Entity_):
    """
    Monitor OSPF v3 Protocol

    .. attribute:: ospf_instances

        Monitor an instance

        **type**\: :py:class:`OspfInstances <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable.Ospfv3Protocol.OspfInstances>`
    """

    # YANG module identity this generated binding was produced from.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Zero-arg super() on Python 3; Python 2 needs the explicit form.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Enable.MonitorEnable.Ospfv3Protocol, self).__init__()

        self.yang_name = "ospfv3-protocol"
        self.yang_parent_name = "monitor-enable"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("ospf-instances", ("ospf_instances", PerfMgmt.Enable.MonitorEnable.Ospfv3Protocol.OspfInstances))])
        self._leafs = OrderedDict()

        # Eagerly instantiate the child container and link its parent.
        self.ospf_instances = PerfMgmt.Enable.MonitorEnable.Ospfv3Protocol.OspfInstances()
        self.ospf_instances.parent = self
        self._children_name_map["ospf_instances"] = "ospf-instances"

        self._segment_path = lambda: "ospfv3-protocol"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/monitor-enable/%s" % self._segment_path()
        # Must be set last: once frozen, __setattr__ validates all assignments.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(PerfMgmt.Enable.MonitorEnable.Ospfv3Protocol, [], name, value)

    class OspfInstances(_Entity_):
        """
        Monitor an instance

        .. attribute:: ospf_instance

            Instance being monitored

            **type**\: list of :py:class:`OspfInstance <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable.Ospfv3Protocol.OspfInstances.OspfInstance>`
        """

        _prefix = 'manageability-perfmgmt-cfg'
        _revision = '2017-09-07'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PerfMgmt.Enable.MonitorEnable.Ospfv3Protocol.OspfInstances, self).__init__()

            self.yang_name = "ospf-instances"
            self.yang_parent_name = "ospfv3-protocol"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("ospf-instance", ("ospf_instance", PerfMgmt.Enable.MonitorEnable.Ospfv3Protocol.OspfInstances.OspfInstance))])
            self._leafs = OrderedDict()
            # YANG list: entries are OspfInstance objects keyed by instance-name.
            self.ospf_instance = YList(self)

            self._segment_path = lambda: "ospf-instances"
            self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/monitor-enable/ospfv3-protocol/%s" % self._segment_path()
            # Freeze last so __setattr__ validation applies afterwards.
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(PerfMgmt.Enable.MonitorEnable.Ospfv3Protocol.OspfInstances, [], name, value)

        class OspfInstance(_Entity_):
            """
            Instance being monitored

            .. attribute:: instance_name (key)

                OSPF Instance Name

                **type**\: str

                **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+

            .. attribute:: template_name

                Template name

                **type**\: str
            """

            _prefix = 'manageability-perfmgmt-cfg'
            _revision = '2017-09-07'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(PerfMgmt.Enable.MonitorEnable.Ospfv3Protocol.OspfInstances.OspfInstance, self).__init__()

                self.yang_name = "ospf-instance"
                self.yang_parent_name = "ospf-instances"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                # 'instance_name' is the YANG list key for this entry.
                self.ylist_key_names = ['instance_name']
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('instance_name', (YLeaf(YType.str, 'instance-name'), ['str'])),
                    ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
                ])
                self.instance_name = None
                self.template_name = None

                # Segment path carries the key predicate for addressability.
                self._segment_path = lambda: "ospf-instance" + "[instance-name='" + str(self.instance_name) + "']"
                self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/monitor-enable/ospfv3-protocol/ospf-instances/%s" % self._segment_path()
                # Freeze last so __setattr__ validation applies afterwards.
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(PerfMgmt.Enable.MonitorEnable.Ospfv3Protocol.OspfInstances.OspfInstance, ['instance_name', 'template_name'], name, value)

            @staticmethod
            def _meta_info():
                # Lazy import keeps the large meta table out of module import time.
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
                return meta._meta_table['PerfMgmt.Enable.MonitorEnable.Ospfv3Protocol.OspfInstances.OspfInstance']['meta_info']

        @staticmethod
        def _meta_info():
            # Lazy import keeps the large meta table out of module import time.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
            return meta._meta_table['PerfMgmt.Enable.MonitorEnable.Ospfv3Protocol.OspfInstances']['meta_info']

    @staticmethod
    def _meta_info():
        # Lazy import keeps the large meta table out of module import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Enable.MonitorEnable.Ospfv3Protocol']['meta_info']
class GenericCounters(_Entity_):
    """
    Monitoring for generic\-counters

    .. attribute:: interfaces

        Monitor an Interface

        **type**\: :py:class:`Interfaces <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable.GenericCounters.Interfaces>`
    """

    # YANG module identity this generated binding was produced from.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Zero-arg super() on Python 3; Python 2 needs the explicit form.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Enable.MonitorEnable.GenericCounters, self).__init__()

        self.yang_name = "generic-counters"
        self.yang_parent_name = "monitor-enable"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("interfaces", ("interfaces", PerfMgmt.Enable.MonitorEnable.GenericCounters.Interfaces))])
        self._leafs = OrderedDict()

        # Eagerly instantiate the child container and link its parent.
        self.interfaces = PerfMgmt.Enable.MonitorEnable.GenericCounters.Interfaces()
        self.interfaces.parent = self
        self._children_name_map["interfaces"] = "interfaces"

        self._segment_path = lambda: "generic-counters"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/monitor-enable/%s" % self._segment_path()
        # Must be set last: once frozen, __setattr__ validates all assignments.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(PerfMgmt.Enable.MonitorEnable.GenericCounters, [], name, value)

    class Interfaces(_Entity_):
        """
        Monitor an Interface

        .. attribute:: interface

            Interface being Monitored

            **type**\: list of :py:class:`Interface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable.GenericCounters.Interfaces.Interface>`
        """

        _prefix = 'manageability-perfmgmt-cfg'
        _revision = '2017-09-07'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PerfMgmt.Enable.MonitorEnable.GenericCounters.Interfaces, self).__init__()

            self.yang_name = "interfaces"
            self.yang_parent_name = "generic-counters"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("interface", ("interface", PerfMgmt.Enable.MonitorEnable.GenericCounters.Interfaces.Interface))])
            self._leafs = OrderedDict()
            # YANG list: entries are Interface objects keyed by interface-name.
            self.interface = YList(self)

            self._segment_path = lambda: "interfaces"
            self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/monitor-enable/generic-counters/%s" % self._segment_path()
            # Freeze last so __setattr__ validation applies afterwards.
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(PerfMgmt.Enable.MonitorEnable.GenericCounters.Interfaces, [], name, value)

        class Interface(_Entity_):
            """
            Interface being Monitored

            .. attribute:: interface_name (key)

                Interface Name

                **type**\: str

                **pattern:** [a\-zA\-Z0\-9.\_/\-]+

            .. attribute:: template_name

                Template name

                **type**\: str
            """

            _prefix = 'manageability-perfmgmt-cfg'
            _revision = '2017-09-07'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(PerfMgmt.Enable.MonitorEnable.GenericCounters.Interfaces.Interface, self).__init__()

                self.yang_name = "interface"
                self.yang_parent_name = "interfaces"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                # 'interface_name' is the YANG list key for this entry.
                self.ylist_key_names = ['interface_name']
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('interface_name', (YLeaf(YType.str, 'interface-name'), ['str'])),
                    ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
                ])
                self.interface_name = None
                self.template_name = None

                # Segment path carries the key predicate for addressability.
                self._segment_path = lambda: "interface" + "[interface-name='" + str(self.interface_name) + "']"
                self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/monitor-enable/generic-counters/interfaces/%s" % self._segment_path()
                # Freeze last so __setattr__ validation applies afterwards.
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(PerfMgmt.Enable.MonitorEnable.GenericCounters.Interfaces.Interface, ['interface_name', 'template_name'], name, value)

            @staticmethod
            def _meta_info():
                # Lazy import keeps the large meta table out of module import time.
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
                return meta._meta_table['PerfMgmt.Enable.MonitorEnable.GenericCounters.Interfaces.Interface']['meta_info']

        @staticmethod
        def _meta_info():
            # Lazy import keeps the large meta table out of module import time.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
            return meta._meta_table['PerfMgmt.Enable.MonitorEnable.GenericCounters.Interfaces']['meta_info']

    @staticmethod
    def _meta_info():
        # Lazy import keeps the large meta table out of module import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Enable.MonitorEnable.GenericCounters']['meta_info']
class Process(_Entity_):
"""
Collection for a single process
.. attribute:: process_nodes
Node specification
**type**\: :py:class:`ProcessNodes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable.Process.ProcessNodes>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PerfMgmt.Enable.MonitorEnable.Process, self).__init__()
self.yang_name = "process"
self.yang_parent_name = "monitor-enable"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("process-nodes", ("process_nodes", PerfMgmt.Enable.MonitorEnable.Process.ProcessNodes))])
self._leafs = OrderedDict()
self.process_nodes = PerfMgmt.Enable.MonitorEnable.Process.ProcessNodes()
self.process_nodes.parent = self
self._children_name_map["process_nodes"] = "process-nodes"
self._segment_path = lambda: "process"
self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/monitor-enable/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PerfMgmt.Enable.MonitorEnable.Process, [], name, value)
class ProcessNodes(_Entity_):
"""
Node specification
.. attribute:: process_node
Node instance
**type**\: list of :py:class:`ProcessNode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable.Process.ProcessNodes.ProcessNode>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PerfMgmt.Enable.MonitorEnable.Process.ProcessNodes, self).__init__()
self.yang_name = "process-nodes"
self.yang_parent_name = "process"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("process-node", ("process_node", PerfMgmt.Enable.MonitorEnable.Process.ProcessNodes.ProcessNode))])
self._leafs = OrderedDict()
self.process_node = YList(self)
self._segment_path = lambda: "process-nodes"
self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/monitor-enable/process/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PerfMgmt.Enable.MonitorEnable.Process.ProcessNodes, [], name, value)
class ProcessNode(_Entity_):
"""
Node instance
.. attribute:: node_id (key)
Node ID
**type**\: str
**pattern:** ([a\-zA\-Z0\-9\_]\*\\d+/){1,2}([a\-zA\-Z0\-9\_]\*\\d+)
.. attribute:: pids
Process ID specification
**type**\: :py:class:`Pids <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable.Process.ProcessNodes.ProcessNode.Pids>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PerfMgmt.Enable.MonitorEnable.Process.ProcessNodes.ProcessNode, self).__init__()
self.yang_name = "process-node"
self.yang_parent_name = "process-nodes"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['node_id']
self._child_classes = OrderedDict([("pids", ("pids", PerfMgmt.Enable.MonitorEnable.Process.ProcessNodes.ProcessNode.Pids))])
self._leafs = OrderedDict([
('node_id', (YLeaf(YType.str, 'node-id'), ['str'])),
])
self.node_id = None
self.pids = PerfMgmt.Enable.MonitorEnable.Process.ProcessNodes.ProcessNode.Pids()
self.pids.parent = self
self._children_name_map["pids"] = "pids"
self._segment_path = lambda: "process-node" + "[node-id='" + str(self.node_id) + "']"
self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/monitor-enable/process/process-nodes/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PerfMgmt.Enable.MonitorEnable.Process.ProcessNodes.ProcessNode, ['node_id'], name, value)
class Pids(_Entity_):
    """
    Process ID specification

    .. attribute:: pid

        Specify an existing template for data collection

        **type**\: list of :py:class:`Pid <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable.Process.ProcessNodes.ProcessNode.Pids.Pid>`

    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible superclass initialization.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Enable.MonitorEnable.Process.ProcessNodes.ProcessNode.Pids, self).__init__()

        # YANG container metadata for the 'pids' node under 'process-node'.
        self.yang_name = "pids"
        self.yang_parent_name = "process-node"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("pid", ("pid", PerfMgmt.Enable.MonitorEnable.Process.ProcessNodes.ProcessNode.Pids.Pid))])
        self._leafs = OrderedDict()  # this container has no leafs of its own

        self.pid = YList(self)  # YANG list of Pid entries
        self._segment_path = lambda: "pids"
        self._is_frozen = True  # set last in __init__ (freeze marker checked by __setattr__)

    def __setattr__(self, name, value):
        # No leafs on this container; empty leaf whitelist.
        self._perform_setattr(PerfMgmt.Enable.MonitorEnable.Process.ProcessNodes.ProcessNode.Pids, [], name, value)

    class Pid(_Entity_):
        """
        Specify an existing template for data
        collection

        .. attribute:: pid  (key)

            Specify Process ID

            **type**\: int

            **range:** 0..4294967295

        .. attribute:: template_name

            Template name

            **type**\: str

        """

        _prefix = 'manageability-perfmgmt-cfg'
        _revision = '2017-09-07'

        def __init__(self):
            # Python 2/3 compatible superclass initialization.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PerfMgmt.Enable.MonitorEnable.Process.ProcessNodes.ProcessNode.Pids.Pid, self).__init__()

            # YANG list entry 'pid', keyed by the 'pid' leaf.
            self.yang_name = "pid"
            self.yang_parent_name = "pids"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['pid']
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('pid', (YLeaf(YType.uint32, 'pid'), ['int'])),
                ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
            ])
            self.pid = None
            self.template_name = None
            # List-entry segment path embeds the key predicate.
            self._segment_path = lambda: "pid" + "[pid='" + str(self.pid) + "']"
            self._is_frozen = True  # set last in __init__ (freeze marker checked by __setattr__)

        def __setattr__(self, name, value):
            # Leaf whitelist: key 'pid' plus 'template_name'.
            self._perform_setattr(PerfMgmt.Enable.MonitorEnable.Process.ProcessNodes.ProcessNode.Pids.Pid, ['pid', 'template_name'], name, value)

        @staticmethod
        def _meta_info():
            """Return YDK meta information for Pid from the generated meta table."""
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
            return meta._meta_table['PerfMgmt.Enable.MonitorEnable.Process.ProcessNodes.ProcessNode.Pids.Pid']['meta_info']

    @staticmethod
    def _meta_info():
        """Return YDK meta information for Pids from the generated meta table."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Enable.MonitorEnable.Process.ProcessNodes.ProcessNode.Pids']['meta_info']
@staticmethod
def _meta_info():
    """Return YDK meta information for ProcessNode from the generated meta table."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
    return meta._meta_table['PerfMgmt.Enable.MonitorEnable.Process.ProcessNodes.ProcessNode']['meta_info']
@staticmethod
def _meta_info():
    """Return YDK meta information for ProcessNodes from the generated meta table."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
    return meta._meta_table['PerfMgmt.Enable.MonitorEnable.Process.ProcessNodes']['meta_info']
@staticmethod
def _meta_info():
    """Return YDK meta information for Process from the generated meta table."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
    return meta._meta_table['PerfMgmt.Enable.MonitorEnable.Process']['meta_info']
class BasicCounters(_Entity_):
    """
    Monitoring for basic\-counters

    .. attribute:: interfaces

        Monitor an Interface

        **type**\: :py:class:`Interfaces <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable.BasicCounters.Interfaces>`

    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible superclass initialization.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Enable.MonitorEnable.BasicCounters, self).__init__()

        # YANG container 'basic-counters' under 'monitor-enable'.
        self.yang_name = "basic-counters"
        self.yang_parent_name = "monitor-enable"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("interfaces", ("interfaces", PerfMgmt.Enable.MonitorEnable.BasicCounters.Interfaces))])
        self._leafs = OrderedDict()  # no leafs on this container

        # Eagerly construct the single child container and link it back.
        self.interfaces = PerfMgmt.Enable.MonitorEnable.BasicCounters.Interfaces()
        self.interfaces.parent = self
        self._children_name_map["interfaces"] = "interfaces"
        self._segment_path = lambda: "basic-counters"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/monitor-enable/%s" % self._segment_path()
        self._is_frozen = True  # set last in __init__ (freeze marker checked by __setattr__)

    def __setattr__(self, name, value):
        # No leafs on this container; empty leaf whitelist.
        self._perform_setattr(PerfMgmt.Enable.MonitorEnable.BasicCounters, [], name, value)

    class Interfaces(_Entity_):
        """
        Monitor an Interface

        .. attribute:: interface

            Interface being Monitored

            **type**\: list of :py:class:`Interface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable.BasicCounters.Interfaces.Interface>`

        """

        _prefix = 'manageability-perfmgmt-cfg'
        _revision = '2017-09-07'

        def __init__(self):
            # Python 2/3 compatible superclass initialization.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PerfMgmt.Enable.MonitorEnable.BasicCounters.Interfaces, self).__init__()

            # YANG container 'interfaces' under 'basic-counters'.
            self.yang_name = "interfaces"
            self.yang_parent_name = "basic-counters"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("interface", ("interface", PerfMgmt.Enable.MonitorEnable.BasicCounters.Interfaces.Interface))])
            self._leafs = OrderedDict()  # no leafs on this container

            self.interface = YList(self)  # YANG list of Interface entries
            self._segment_path = lambda: "interfaces"
            self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/monitor-enable/basic-counters/%s" % self._segment_path()
            self._is_frozen = True  # set last in __init__ (freeze marker checked by __setattr__)

        def __setattr__(self, name, value):
            # No leafs on this container; empty leaf whitelist.
            self._perform_setattr(PerfMgmt.Enable.MonitorEnable.BasicCounters.Interfaces, [], name, value)

        class Interface(_Entity_):
            """
            Interface being Monitored

            .. attribute:: interface_name  (key)

                Interface Name

                **type**\: str

                **pattern:** [a\-zA\-Z0\-9.\_/\-]+

            .. attribute:: template_name

                Template name

                **type**\: str

            """

            _prefix = 'manageability-perfmgmt-cfg'
            _revision = '2017-09-07'

            def __init__(self):
                # Python 2/3 compatible superclass initialization.
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(PerfMgmt.Enable.MonitorEnable.BasicCounters.Interfaces.Interface, self).__init__()

                # YANG list entry 'interface', keyed by 'interface-name'.
                self.yang_name = "interface"
                self.yang_parent_name = "interfaces"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = ['interface_name']
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('interface_name', (YLeaf(YType.str, 'interface-name'), ['str'])),
                    ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
                ])
                self.interface_name = None
                self.template_name = None
                # List-entry segment path embeds the key predicate.
                self._segment_path = lambda: "interface" + "[interface-name='" + str(self.interface_name) + "']"
                self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/monitor-enable/basic-counters/interfaces/%s" % self._segment_path()
                self._is_frozen = True  # set last in __init__ (freeze marker checked by __setattr__)

            def __setattr__(self, name, value):
                # Leaf whitelist: key 'interface_name' plus 'template_name'.
                self._perform_setattr(PerfMgmt.Enable.MonitorEnable.BasicCounters.Interfaces.Interface, ['interface_name', 'template_name'], name, value)

            @staticmethod
            def _meta_info():
                """Return YDK meta information for Interface from the generated meta table."""
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
                return meta._meta_table['PerfMgmt.Enable.MonitorEnable.BasicCounters.Interfaces.Interface']['meta_info']

        @staticmethod
        def _meta_info():
            """Return YDK meta information for Interfaces from the generated meta table."""
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
            return meta._meta_table['PerfMgmt.Enable.MonitorEnable.BasicCounters.Interfaces']['meta_info']

    @staticmethod
    def _meta_info():
        """Return YDK meta information for BasicCounters from the generated meta table."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Enable.MonitorEnable.BasicCounters']['meta_info']
class Memory(_Entity_):
    """
    Collection for memory

    .. attribute:: nodes

        Node specification

        **type**\: :py:class:`Nodes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable.Memory.Nodes>`

    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible superclass initialization.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Enable.MonitorEnable.Memory, self).__init__()

        # YANG container 'memory' under 'monitor-enable'.
        self.yang_name = "memory"
        self.yang_parent_name = "monitor-enable"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("nodes", ("nodes", PerfMgmt.Enable.MonitorEnable.Memory.Nodes))])
        self._leafs = OrderedDict()  # no leafs on this container

        # Eagerly construct the single child container and link it back.
        self.nodes = PerfMgmt.Enable.MonitorEnable.Memory.Nodes()
        self.nodes.parent = self
        self._children_name_map["nodes"] = "nodes"
        self._segment_path = lambda: "memory"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/monitor-enable/%s" % self._segment_path()
        self._is_frozen = True  # set last in __init__ (freeze marker checked by __setattr__)

    def __setattr__(self, name, value):
        # No leafs on this container; empty leaf whitelist.
        self._perform_setattr(PerfMgmt.Enable.MonitorEnable.Memory, [], name, value)

    class Nodes(_Entity_):
        """
        Node specification

        .. attribute:: node

            Node instance

            **type**\: list of :py:class:`Node <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable.Memory.Nodes.Node>`

        """

        _prefix = 'manageability-perfmgmt-cfg'
        _revision = '2017-09-07'

        def __init__(self):
            # Python 2/3 compatible superclass initialization.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PerfMgmt.Enable.MonitorEnable.Memory.Nodes, self).__init__()

            # YANG container 'nodes' under 'memory'.
            self.yang_name = "nodes"
            self.yang_parent_name = "memory"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("node", ("node", PerfMgmt.Enable.MonitorEnable.Memory.Nodes.Node))])
            self._leafs = OrderedDict()  # no leafs on this container

            self.node = YList(self)  # YANG list of Node entries
            self._segment_path = lambda: "nodes"
            self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/monitor-enable/memory/%s" % self._segment_path()
            self._is_frozen = True  # set last in __init__ (freeze marker checked by __setattr__)

        def __setattr__(self, name, value):
            # No leafs on this container; empty leaf whitelist.
            self._perform_setattr(PerfMgmt.Enable.MonitorEnable.Memory.Nodes, [], name, value)

        class Node(_Entity_):
            """
            Node instance

            .. attribute:: node_id  (key)

                Node ID

                **type**\: str

                **pattern:** ([a\-zA\-Z0\-9\_]\*\\d+/){1,2}([a\-zA\-Z0\-9\_]\*\\d+)

            .. attribute:: template_name

                Template name

                **type**\: str

            """

            _prefix = 'manageability-perfmgmt-cfg'
            _revision = '2017-09-07'

            def __init__(self):
                # Python 2/3 compatible superclass initialization.
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(PerfMgmt.Enable.MonitorEnable.Memory.Nodes.Node, self).__init__()

                # YANG list entry 'node', keyed by 'node-id'.
                self.yang_name = "node"
                self.yang_parent_name = "nodes"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = ['node_id']
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('node_id', (YLeaf(YType.str, 'node-id'), ['str'])),
                    ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
                ])
                self.node_id = None
                self.template_name = None
                # List-entry segment path embeds the key predicate.
                self._segment_path = lambda: "node" + "[node-id='" + str(self.node_id) + "']"
                self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/monitor-enable/memory/nodes/%s" % self._segment_path()
                self._is_frozen = True  # set last in __init__ (freeze marker checked by __setattr__)

            def __setattr__(self, name, value):
                # Leaf whitelist: key 'node_id' plus 'template_name'.
                self._perform_setattr(PerfMgmt.Enable.MonitorEnable.Memory.Nodes.Node, ['node_id', 'template_name'], name, value)

            @staticmethod
            def _meta_info():
                """Return YDK meta information for Node from the generated meta table."""
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
                return meta._meta_table['PerfMgmt.Enable.MonitorEnable.Memory.Nodes.Node']['meta_info']

        @staticmethod
        def _meta_info():
            """Return YDK meta information for Nodes from the generated meta table."""
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
            return meta._meta_table['PerfMgmt.Enable.MonitorEnable.Memory.Nodes']['meta_info']

    @staticmethod
    def _meta_info():
        """Return YDK meta information for Memory from the generated meta table."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Enable.MonitorEnable.Memory']['meta_info']
class Ospfv2Protocol(_Entity_):
    """
    Monitor OSPF v2 Protocol

    .. attribute:: ospf_instances

        Monitor an instance

        **type**\: :py:class:`OspfInstances <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable.Ospfv2Protocol.OspfInstances>`

    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible superclass initialization.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Enable.MonitorEnable.Ospfv2Protocol, self).__init__()

        # YANG container 'ospfv2-protocol' under 'monitor-enable'.
        self.yang_name = "ospfv2-protocol"
        self.yang_parent_name = "monitor-enable"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("ospf-instances", ("ospf_instances", PerfMgmt.Enable.MonitorEnable.Ospfv2Protocol.OspfInstances))])
        self._leafs = OrderedDict()  # no leafs on this container

        # Eagerly construct the single child container and link it back.
        self.ospf_instances = PerfMgmt.Enable.MonitorEnable.Ospfv2Protocol.OspfInstances()
        self.ospf_instances.parent = self
        self._children_name_map["ospf_instances"] = "ospf-instances"
        self._segment_path = lambda: "ospfv2-protocol"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/monitor-enable/%s" % self._segment_path()
        self._is_frozen = True  # set last in __init__ (freeze marker checked by __setattr__)

    def __setattr__(self, name, value):
        # No leafs on this container; empty leaf whitelist.
        self._perform_setattr(PerfMgmt.Enable.MonitorEnable.Ospfv2Protocol, [], name, value)

    class OspfInstances(_Entity_):
        """
        Monitor an instance

        .. attribute:: ospf_instance

            Instance being monitored

            **type**\: list of :py:class:`OspfInstance <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable.Ospfv2Protocol.OspfInstances.OspfInstance>`

        """

        _prefix = 'manageability-perfmgmt-cfg'
        _revision = '2017-09-07'

        def __init__(self):
            # Python 2/3 compatible superclass initialization.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PerfMgmt.Enable.MonitorEnable.Ospfv2Protocol.OspfInstances, self).__init__()

            # YANG container 'ospf-instances' under 'ospfv2-protocol'.
            self.yang_name = "ospf-instances"
            self.yang_parent_name = "ospfv2-protocol"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("ospf-instance", ("ospf_instance", PerfMgmt.Enable.MonitorEnable.Ospfv2Protocol.OspfInstances.OspfInstance))])
            self._leafs = OrderedDict()  # no leafs on this container

            self.ospf_instance = YList(self)  # YANG list of OspfInstance entries
            self._segment_path = lambda: "ospf-instances"
            self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/monitor-enable/ospfv2-protocol/%s" % self._segment_path()
            self._is_frozen = True  # set last in __init__ (freeze marker checked by __setattr__)

        def __setattr__(self, name, value):
            # No leafs on this container; empty leaf whitelist.
            self._perform_setattr(PerfMgmt.Enable.MonitorEnable.Ospfv2Protocol.OspfInstances, [], name, value)

        class OspfInstance(_Entity_):
            """
            Instance being monitored

            .. attribute:: instance_name  (key)

                OSPF Instance Name

                **type**\: str

                **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+

            .. attribute:: template_name

                Template name

                **type**\: str

            """

            _prefix = 'manageability-perfmgmt-cfg'
            _revision = '2017-09-07'

            def __init__(self):
                # Python 2/3 compatible superclass initialization.
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(PerfMgmt.Enable.MonitorEnable.Ospfv2Protocol.OspfInstances.OspfInstance, self).__init__()

                # YANG list entry 'ospf-instance', keyed by 'instance-name'.
                self.yang_name = "ospf-instance"
                self.yang_parent_name = "ospf-instances"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = ['instance_name']
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('instance_name', (YLeaf(YType.str, 'instance-name'), ['str'])),
                    ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
                ])
                self.instance_name = None
                self.template_name = None
                # List-entry segment path embeds the key predicate.
                self._segment_path = lambda: "ospf-instance" + "[instance-name='" + str(self.instance_name) + "']"
                self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/monitor-enable/ospfv2-protocol/ospf-instances/%s" % self._segment_path()
                self._is_frozen = True  # set last in __init__ (freeze marker checked by __setattr__)

            def __setattr__(self, name, value):
                # Leaf whitelist: key 'instance_name' plus 'template_name'.
                self._perform_setattr(PerfMgmt.Enable.MonitorEnable.Ospfv2Protocol.OspfInstances.OspfInstance, ['instance_name', 'template_name'], name, value)

            @staticmethod
            def _meta_info():
                """Return YDK meta information for OspfInstance from the generated meta table."""
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
                return meta._meta_table['PerfMgmt.Enable.MonitorEnable.Ospfv2Protocol.OspfInstances.OspfInstance']['meta_info']

        @staticmethod
        def _meta_info():
            """Return YDK meta information for OspfInstances from the generated meta table."""
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
            return meta._meta_table['PerfMgmt.Enable.MonitorEnable.Ospfv2Protocol.OspfInstances']['meta_info']

    @staticmethod
    def _meta_info():
        """Return YDK meta information for Ospfv2Protocol from the generated meta table."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Enable.MonitorEnable.Ospfv2Protocol']['meta_info']
class Cpu(_Entity_):
    """
    Collection for CPU

    .. attribute:: nodes

        Node specification

        **type**\: :py:class:`Nodes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable.Cpu.Nodes>`

    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible superclass initialization.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Enable.MonitorEnable.Cpu, self).__init__()

        # YANG container 'cpu' under 'monitor-enable'.
        self.yang_name = "cpu"
        self.yang_parent_name = "monitor-enable"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("nodes", ("nodes", PerfMgmt.Enable.MonitorEnable.Cpu.Nodes))])
        self._leafs = OrderedDict()  # no leafs on this container

        # Eagerly construct the single child container and link it back.
        self.nodes = PerfMgmt.Enable.MonitorEnable.Cpu.Nodes()
        self.nodes.parent = self
        self._children_name_map["nodes"] = "nodes"
        self._segment_path = lambda: "cpu"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/monitor-enable/%s" % self._segment_path()
        self._is_frozen = True  # set last in __init__ (freeze marker checked by __setattr__)

    def __setattr__(self, name, value):
        # No leafs on this container; empty leaf whitelist.
        self._perform_setattr(PerfMgmt.Enable.MonitorEnable.Cpu, [], name, value)

    class Nodes(_Entity_):
        """
        Node specification

        .. attribute:: node

            Node instance

            **type**\: list of :py:class:`Node <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable.Cpu.Nodes.Node>`

        """

        _prefix = 'manageability-perfmgmt-cfg'
        _revision = '2017-09-07'

        def __init__(self):
            # Python 2/3 compatible superclass initialization.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PerfMgmt.Enable.MonitorEnable.Cpu.Nodes, self).__init__()

            # YANG container 'nodes' under 'cpu'.
            self.yang_name = "nodes"
            self.yang_parent_name = "cpu"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("node", ("node", PerfMgmt.Enable.MonitorEnable.Cpu.Nodes.Node))])
            self._leafs = OrderedDict()  # no leafs on this container

            self.node = YList(self)  # YANG list of Node entries
            self._segment_path = lambda: "nodes"
            self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/monitor-enable/cpu/%s" % self._segment_path()
            self._is_frozen = True  # set last in __init__ (freeze marker checked by __setattr__)

        def __setattr__(self, name, value):
            # No leafs on this container; empty leaf whitelist.
            self._perform_setattr(PerfMgmt.Enable.MonitorEnable.Cpu.Nodes, [], name, value)

        class Node(_Entity_):
            """
            Node instance

            .. attribute:: node_id  (key)

                Node ID

                **type**\: str

                **pattern:** ([a\-zA\-Z0\-9\_]\*\\d+/){1,2}([a\-zA\-Z0\-9\_]\*\\d+)

            .. attribute:: template_name

                Template name

                **type**\: str

            """

            _prefix = 'manageability-perfmgmt-cfg'
            _revision = '2017-09-07'

            def __init__(self):
                # Python 2/3 compatible superclass initialization.
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(PerfMgmt.Enable.MonitorEnable.Cpu.Nodes.Node, self).__init__()

                # YANG list entry 'node', keyed by 'node-id'.
                self.yang_name = "node"
                self.yang_parent_name = "nodes"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = ['node_id']
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('node_id', (YLeaf(YType.str, 'node-id'), ['str'])),
                    ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
                ])
                self.node_id = None
                self.template_name = None
                # List-entry segment path embeds the key predicate.
                self._segment_path = lambda: "node" + "[node-id='" + str(self.node_id) + "']"
                self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/monitor-enable/cpu/nodes/%s" % self._segment_path()
                self._is_frozen = True  # set last in __init__ (freeze marker checked by __setattr__)

            def __setattr__(self, name, value):
                # Leaf whitelist: key 'node_id' plus 'template_name'.
                self._perform_setattr(PerfMgmt.Enable.MonitorEnable.Cpu.Nodes.Node, ['node_id', 'template_name'], name, value)

            @staticmethod
            def _meta_info():
                """Return YDK meta information for Node from the generated meta table."""
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
                return meta._meta_table['PerfMgmt.Enable.MonitorEnable.Cpu.Nodes.Node']['meta_info']

        @staticmethod
        def _meta_info():
            """Return YDK meta information for Nodes from the generated meta table."""
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
            return meta._meta_table['PerfMgmt.Enable.MonitorEnable.Cpu.Nodes']['meta_info']

    @staticmethod
    def _meta_info():
        """Return YDK meta information for Cpu from the generated meta table."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Enable.MonitorEnable.Cpu']['meta_info']
class Bgp(_Entity_):
    """
    Monitor BGP protocol

    .. attribute:: neighbors

        Monitor BGP protocol for a BGP peer

        **type**\: :py:class:`Neighbors <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable.Bgp.Neighbors>`

    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible superclass initialization.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Enable.MonitorEnable.Bgp, self).__init__()

        # YANG container 'bgp' under 'monitor-enable'.
        self.yang_name = "bgp"
        self.yang_parent_name = "monitor-enable"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("neighbors", ("neighbors", PerfMgmt.Enable.MonitorEnable.Bgp.Neighbors))])
        self._leafs = OrderedDict()  # no leafs on this container

        # Eagerly construct the single child container and link it back.
        self.neighbors = PerfMgmt.Enable.MonitorEnable.Bgp.Neighbors()
        self.neighbors.parent = self
        self._children_name_map["neighbors"] = "neighbors"
        self._segment_path = lambda: "bgp"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/monitor-enable/%s" % self._segment_path()
        self._is_frozen = True  # set last in __init__ (freeze marker checked by __setattr__)

    def __setattr__(self, name, value):
        # No leafs on this container; empty leaf whitelist.
        self._perform_setattr(PerfMgmt.Enable.MonitorEnable.Bgp, [], name, value)

    class Neighbors(_Entity_):
        """
        Monitor BGP protocol for a BGP peer

        .. attribute:: neighbor

            Neighbor being monitored

            **type**\: list of :py:class:`Neighbor <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable.Bgp.Neighbors.Neighbor>`

        """

        _prefix = 'manageability-perfmgmt-cfg'
        _revision = '2017-09-07'

        def __init__(self):
            # Python 2/3 compatible superclass initialization.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PerfMgmt.Enable.MonitorEnable.Bgp.Neighbors, self).__init__()

            # YANG container 'neighbors' under 'bgp'.
            self.yang_name = "neighbors"
            self.yang_parent_name = "bgp"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("neighbor", ("neighbor", PerfMgmt.Enable.MonitorEnable.Bgp.Neighbors.Neighbor))])
            self._leafs = OrderedDict()  # no leafs on this container

            self.neighbor = YList(self)  # YANG list of Neighbor entries
            self._segment_path = lambda: "neighbors"
            self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/monitor-enable/bgp/%s" % self._segment_path()
            self._is_frozen = True  # set last in __init__ (freeze marker checked by __setattr__)

        def __setattr__(self, name, value):
            # No leafs on this container; empty leaf whitelist.
            self._perform_setattr(PerfMgmt.Enable.MonitorEnable.Bgp.Neighbors, [], name, value)

        class Neighbor(_Entity_):
            """
            Neighbor being monitored

            .. attribute:: peer_address  (key)

                IP address of the Neighbor

                **type**\: str

                **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

            .. attribute:: template_name

                Template name

                **type**\: str

            """

            _prefix = 'manageability-perfmgmt-cfg'
            _revision = '2017-09-07'

            def __init__(self):
                # Python 2/3 compatible superclass initialization.
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(PerfMgmt.Enable.MonitorEnable.Bgp.Neighbors.Neighbor, self).__init__()

                # YANG list entry 'neighbor', keyed by 'peer-address'.
                self.yang_name = "neighbor"
                self.yang_parent_name = "neighbors"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = ['peer_address']
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('peer_address', (YLeaf(YType.str, 'peer-address'), ['str'])),
                    ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
                ])
                self.peer_address = None
                self.template_name = None
                # List-entry segment path embeds the key predicate.
                self._segment_path = lambda: "neighbor" + "[peer-address='" + str(self.peer_address) + "']"
                self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/monitor-enable/bgp/neighbors/%s" % self._segment_path()
                self._is_frozen = True  # set last in __init__ (freeze marker checked by __setattr__)

            def __setattr__(self, name, value):
                # Leaf whitelist: key 'peer_address' plus 'template_name'.
                self._perform_setattr(PerfMgmt.Enable.MonitorEnable.Bgp.Neighbors.Neighbor, ['peer_address', 'template_name'], name, value)

            @staticmethod
            def _meta_info():
                """Return YDK meta information for Neighbor from the generated meta table."""
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
                return meta._meta_table['PerfMgmt.Enable.MonitorEnable.Bgp.Neighbors.Neighbor']['meta_info']

        @staticmethod
        def _meta_info():
            """Return YDK meta information for Neighbors from the generated meta table."""
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
            return meta._meta_table['PerfMgmt.Enable.MonitorEnable.Bgp.Neighbors']['meta_info']

    @staticmethod
    def _meta_info():
        """Return YDK meta information for Bgp from the generated meta table."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Enable.MonitorEnable.Bgp']['meta_info']
class DataRates(_Entity_):
    """
    Monitoring for data\-rates

    .. attribute:: interfaces

        Monitor an Interface

        **type**\: :py:class:`Interfaces <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable.DataRates.Interfaces>`

    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible superclass initialization.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Enable.MonitorEnable.DataRates, self).__init__()

        # YANG container 'data-rates' under 'monitor-enable'.
        self.yang_name = "data-rates"
        self.yang_parent_name = "monitor-enable"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("interfaces", ("interfaces", PerfMgmt.Enable.MonitorEnable.DataRates.Interfaces))])
        self._leafs = OrderedDict()  # no leafs on this container

        # Eagerly construct the single child container and link it back.
        self.interfaces = PerfMgmt.Enable.MonitorEnable.DataRates.Interfaces()
        self.interfaces.parent = self
        self._children_name_map["interfaces"] = "interfaces"
        self._segment_path = lambda: "data-rates"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/monitor-enable/%s" % self._segment_path()
        self._is_frozen = True  # set last in __init__ (freeze marker checked by __setattr__)

    def __setattr__(self, name, value):
        # No leafs on this container; empty leaf whitelist.
        self._perform_setattr(PerfMgmt.Enable.MonitorEnable.DataRates, [], name, value)

    class Interfaces(_Entity_):
        """
        Monitor an Interface

        .. attribute:: interface

            Interface being Monitored

            **type**\: list of :py:class:`Interface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Enable.MonitorEnable.DataRates.Interfaces.Interface>`

        """

        _prefix = 'manageability-perfmgmt-cfg'
        _revision = '2017-09-07'

        def __init__(self):
            # Python 2/3 compatible superclass initialization.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PerfMgmt.Enable.MonitorEnable.DataRates.Interfaces, self).__init__()

            # YANG container 'interfaces' under 'data-rates'.
            self.yang_name = "interfaces"
            self.yang_parent_name = "data-rates"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("interface", ("interface", PerfMgmt.Enable.MonitorEnable.DataRates.Interfaces.Interface))])
            self._leafs = OrderedDict()  # no leafs on this container

            self.interface = YList(self)  # YANG list of Interface entries
            self._segment_path = lambda: "interfaces"
            self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/monitor-enable/data-rates/%s" % self._segment_path()
            self._is_frozen = True  # set last in __init__ (freeze marker checked by __setattr__)

        def __setattr__(self, name, value):
            # No leafs on this container; empty leaf whitelist.
            self._perform_setattr(PerfMgmt.Enable.MonitorEnable.DataRates.Interfaces, [], name, value)

        class Interface(_Entity_):
            """
            Interface being Monitored

            .. attribute:: interface_name  (key)

                Interface Name

                **type**\: str

                **pattern:** [a\-zA\-Z0\-9.\_/\-]+

            .. attribute:: template_name

                Template name

                **type**\: str

            """

            _prefix = 'manageability-perfmgmt-cfg'
            _revision = '2017-09-07'

            def __init__(self):
                # Python 2/3 compatible superclass initialization.
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(PerfMgmt.Enable.MonitorEnable.DataRates.Interfaces.Interface, self).__init__()

                # YANG list entry 'interface', keyed by 'interface-name'.
                self.yang_name = "interface"
                self.yang_parent_name = "interfaces"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = ['interface_name']
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('interface_name', (YLeaf(YType.str, 'interface-name'), ['str'])),
                    ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
                ])
                self.interface_name = None
                self.template_name = None
                # List-entry segment path embeds the key predicate.
                self._segment_path = lambda: "interface" + "[interface-name='" + str(self.interface_name) + "']"
                self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/enable/monitor-enable/data-rates/interfaces/%s" % self._segment_path()
                self._is_frozen = True  # set last in __init__ (freeze marker checked by __setattr__)

            def __setattr__(self, name, value):
                # Leaf whitelist: key 'interface_name' plus 'template_name'.
                self._perform_setattr(PerfMgmt.Enable.MonitorEnable.DataRates.Interfaces.Interface, ['interface_name', 'template_name'], name, value)

            @staticmethod
            def _meta_info():
                """Return YDK meta information for Interface from the generated meta table."""
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
                return meta._meta_table['PerfMgmt.Enable.MonitorEnable.DataRates.Interfaces.Interface']['meta_info']

        @staticmethod
        def _meta_info():
            """Return YDK meta information for Interfaces from the generated meta table."""
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
            return meta._meta_table['PerfMgmt.Enable.MonitorEnable.DataRates.Interfaces']['meta_info']

    @staticmethod
    def _meta_info():
        """Return YDK meta information for DataRates from the generated meta table."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Enable.MonitorEnable.DataRates']['meta_info']
@staticmethod
def _meta_info():
    """Return YDK meta information for MonitorEnable from the generated meta table."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
    return meta._meta_table['PerfMgmt.Enable.MonitorEnable']['meta_info']
@staticmethod
def _meta_info():
    """Return YDK meta information for Enable from the generated meta table."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
    return meta._meta_table['PerfMgmt.Enable']['meta_info']
class RegExpGroups(_Entity_):
"""
Configure regular expression group
.. attribute:: reg_exp_group
Specify regular expression group name
**type**\: list of :py:class:`RegExpGroup <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.RegExpGroups.RegExpGroup>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PerfMgmt.RegExpGroups, self).__init__()
self.yang_name = "reg-exp-groups"
self.yang_parent_name = "perf-mgmt"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("reg-exp-group", ("reg_exp_group", PerfMgmt.RegExpGroups.RegExpGroup))])
self._leafs = OrderedDict()
self.reg_exp_group = YList(self)
self._segment_path = lambda: "reg-exp-groups"
self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PerfMgmt.RegExpGroups, [], name, value)
class RegExpGroup(_Entity_):
"""
Specify regular expression group name
.. attribute:: reg_exp_group_name (key)
Regular expression group name
**type**\: str
**length:** 1..32
.. attribute:: reg_exps
Configure regular expression
**type**\: :py:class:`RegExps <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.RegExpGroups.RegExpGroup.RegExps>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PerfMgmt.RegExpGroups.RegExpGroup, self).__init__()
self.yang_name = "reg-exp-group"
self.yang_parent_name = "reg-exp-groups"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['reg_exp_group_name']
self._child_classes = OrderedDict([("reg-exps", ("reg_exps", PerfMgmt.RegExpGroups.RegExpGroup.RegExps))])
self._leafs = OrderedDict([
('reg_exp_group_name', (YLeaf(YType.str, 'reg-exp-group-name'), ['str'])),
])
self.reg_exp_group_name = None
self.reg_exps = PerfMgmt.RegExpGroups.RegExpGroup.RegExps()
self.reg_exps.parent = self
self._children_name_map["reg_exps"] = "reg-exps"
self._segment_path = lambda: "reg-exp-group" + "[reg-exp-group-name='" + str(self.reg_exp_group_name) + "']"
self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/reg-exp-groups/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PerfMgmt.RegExpGroups.RegExpGroup, ['reg_exp_group_name'], name, value)
class RegExps(_Entity_):
    """
    Configure regular expression

    .. attribute:: reg_exp

        Specify regular expression index number
        **type**\: list of :py:class:`RegExp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.RegExpGroups.RegExpGroup.RegExps.RegExp>`
    """

    # YANG module identity for this generated binding.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Py2/Py3-compatible super() call (generated code supports both).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.RegExpGroups.RegExpGroup.RegExps, self).__init__()

        # YANG node identity and position within the model tree.
        self.yang_name = "reg-exps"
        self.yang_parent_name = "reg-exp-group"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Map of YANG child node name -> (python attribute name, child class).
        self._child_classes = OrderedDict([("reg-exp", ("reg_exp", PerfMgmt.RegExpGroups.RegExpGroup.RegExps.RegExp))])
        self._leafs = OrderedDict()  # this container has no leaf nodes

        # YANG list of reg-exp entries, managed by the framework.
        self.reg_exp = YList(self)
        self._segment_path = lambda: "reg-exps"
        # Freeze last so __setattr__ rejects unknown attributes from here on.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route attribute writes through the framework setter for validation.
        self._perform_setattr(PerfMgmt.RegExpGroups.RegExpGroup.RegExps, [], name, value)


    class RegExp(_Entity_):
        """
        Specify regular expression index number

        .. attribute:: reg_exp_index (key)

            Regular expression index number
            **type**\: int
            **range:** 1..100

        .. attribute:: reg_exp_string

            Regular expression string to match
            **type**\: str
            **length:** 1..128
            **mandatory**\: True
        """

        _prefix = 'manageability-perfmgmt-cfg'
        _revision = '2017-09-07'

        def __init__(self):
            # Py2/Py3-compatible super() call.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PerfMgmt.RegExpGroups.RegExpGroup.RegExps.RegExp, self).__init__()

            # YANG node identity; this is a keyed list entry.
            self.yang_name = "reg-exp"
            self.yang_parent_name = "reg-exps"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['reg_exp_index']
            self._child_classes = OrderedDict([])
            # Leaf registry: python name -> (YLeaf descriptor, accepted python types).
            self._leafs = OrderedDict([
                ('reg_exp_index', (YLeaf(YType.uint32, 'reg-exp-index'), ['int'])),
                ('reg_exp_string', (YLeaf(YType.str, 'reg-exp-string'), ['str'])),
            ])
            self.reg_exp_index = None
            self.reg_exp_string = None
            # Keyed path segment: embeds the list key value.
            self._segment_path = lambda: "reg-exp" + "[reg-exp-index='" + str(self.reg_exp_index) + "']"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(PerfMgmt.RegExpGroups.RegExpGroup.RegExps.RegExp, ['reg_exp_index', 'reg_exp_string'], name, value)

        @staticmethod
        def _meta_info():
            # Import the generated meta table on demand and return this
            # class's meta-information entry.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
            return meta._meta_table['PerfMgmt.RegExpGroups.RegExpGroup.RegExps.RegExp']['meta_info']

    @staticmethod
    def _meta_info():
        # Import the generated meta table on demand and return this
        # class's meta-information entry.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.RegExpGroups.RegExpGroup.RegExps']['meta_info']
@staticmethod
def _meta_info():
    # Import the generated meta table on demand and return the
    # meta-information entry for the RegExpGroup class.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
    return meta._meta_table['PerfMgmt.RegExpGroups.RegExpGroup']['meta_info']
@staticmethod
def _meta_info():
    # Import the generated meta table on demand and return the
    # meta-information entry for the RegExpGroups class.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
    return meta._meta_table['PerfMgmt.RegExpGroups']['meta_info']
class Threshold(_Entity_):
"""
Container for threshold templates
.. attribute:: generic_counter_interface
Interface Generic Counter threshold configuration
**type**\: :py:class:`GenericCounterInterface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.GenericCounterInterface>`
.. attribute:: ldp_mpls
MPLS LDP threshold configuration
**type**\: :py:class:`LdpMpls <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.LdpMpls>`
.. attribute:: basic_counter_interface
Interface Basic Counter threshold configuration
**type**\: :py:class:`BasicCounterInterface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.BasicCounterInterface>`
.. attribute:: bgp
BGP threshold configuration
**type**\: :py:class:`Bgp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Bgp>`
.. attribute:: ospfv2_protocol
OSPF v2 Protocol threshold configuration
**type**\: :py:class:`Ospfv2Protocol <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv2Protocol>`
.. attribute:: cpu_node
Node CPU threshold configuration
**type**\: :py:class:`CpuNode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.CpuNode>`
.. attribute:: data_rate_interface
Interface Data Rates threshold configuration
**type**\: :py:class:`DataRateInterface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.DataRateInterface>`
.. attribute:: process_node
Node Process threshold configuration
**type**\: :py:class:`ProcessNode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.ProcessNode>`
.. attribute:: memory_node
Node Memory threshold configuration
**type**\: :py:class:`MemoryNode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.MemoryNode>`
.. attribute:: ospfv3_protocol
OSPF v3 Protocol threshold configuration
**type**\: :py:class:`Ospfv3Protocol <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv3Protocol>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
    # Py2/Py3-compatible super() call (generated code supports both).
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PerfMgmt.Threshold, self).__init__()

    # YANG node identity and position within the model tree.
    self.yang_name = "threshold"
    self.yang_parent_name = "perf-mgmt"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # Map of YANG child node name -> (python attribute name, child class);
    # one entry per threshold-template category container below.
    self._child_classes = OrderedDict([("generic-counter-interface", ("generic_counter_interface", PerfMgmt.Threshold.GenericCounterInterface)), ("ldp-mpls", ("ldp_mpls", PerfMgmt.Threshold.LdpMpls)), ("basic-counter-interface", ("basic_counter_interface", PerfMgmt.Threshold.BasicCounterInterface)), ("bgp", ("bgp", PerfMgmt.Threshold.Bgp)), ("ospfv2-protocol", ("ospfv2_protocol", PerfMgmt.Threshold.Ospfv2Protocol)), ("cpu-node", ("cpu_node", PerfMgmt.Threshold.CpuNode)), ("data-rate-interface", ("data_rate_interface", PerfMgmt.Threshold.DataRateInterface)), ("process-node", ("process_node", PerfMgmt.Threshold.ProcessNode)), ("memory-node", ("memory_node", PerfMgmt.Threshold.MemoryNode)), ("ospfv3-protocol", ("ospfv3_protocol", PerfMgmt.Threshold.Ospfv3Protocol))])
    self._leafs = OrderedDict()  # this container has no leaf nodes

    # Instantiate each child container, parent it to this node, and record
    # the python-name -> yang-name mapping used for serialization.
    self.generic_counter_interface = PerfMgmt.Threshold.GenericCounterInterface()
    self.generic_counter_interface.parent = self
    self._children_name_map["generic_counter_interface"] = "generic-counter-interface"

    self.ldp_mpls = PerfMgmt.Threshold.LdpMpls()
    self.ldp_mpls.parent = self
    self._children_name_map["ldp_mpls"] = "ldp-mpls"

    self.basic_counter_interface = PerfMgmt.Threshold.BasicCounterInterface()
    self.basic_counter_interface.parent = self
    self._children_name_map["basic_counter_interface"] = "basic-counter-interface"

    self.bgp = PerfMgmt.Threshold.Bgp()
    self.bgp.parent = self
    self._children_name_map["bgp"] = "bgp"

    self.ospfv2_protocol = PerfMgmt.Threshold.Ospfv2Protocol()
    self.ospfv2_protocol.parent = self
    self._children_name_map["ospfv2_protocol"] = "ospfv2-protocol"

    self.cpu_node = PerfMgmt.Threshold.CpuNode()
    self.cpu_node.parent = self
    self._children_name_map["cpu_node"] = "cpu-node"

    self.data_rate_interface = PerfMgmt.Threshold.DataRateInterface()
    self.data_rate_interface.parent = self
    self._children_name_map["data_rate_interface"] = "data-rate-interface"

    self.process_node = PerfMgmt.Threshold.ProcessNode()
    self.process_node.parent = self
    self._children_name_map["process_node"] = "process-node"

    self.memory_node = PerfMgmt.Threshold.MemoryNode()
    self.memory_node.parent = self
    self._children_name_map["memory_node"] = "memory-node"

    self.ospfv3_protocol = PerfMgmt.Threshold.Ospfv3Protocol()
    self.ospfv3_protocol.parent = self
    self._children_name_map["ospfv3_protocol"] = "ospfv3-protocol"

    # Data-tree path segments for this node.
    self._segment_path = lambda: "threshold"
    self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/%s" % self._segment_path()
    # Freeze last so __setattr__ rejects unknown attributes from here on.
    self._is_frozen = True

def __setattr__(self, name, value):
    # Route attribute writes through the framework setter for validation.
    self._perform_setattr(PerfMgmt.Threshold, [], name, value)
class GenericCounterInterface(_Entity_):
"""
Interface Generic Counter threshold
configuration
.. attribute:: generic_counter_interface_templates
Interface Generic Counter threshold templates
**type**\: :py:class:`GenericCounterInterfaceTemplates <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
    # Py2/Py3-compatible super() call (generated code supports both).
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PerfMgmt.Threshold.GenericCounterInterface, self).__init__()

    # YANG node identity and position within the model tree.
    self.yang_name = "generic-counter-interface"
    self.yang_parent_name = "threshold"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # Single child container holding the template list.
    self._child_classes = OrderedDict([("generic-counter-interface-templates", ("generic_counter_interface_templates", PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates))])
    self._leafs = OrderedDict()  # this container has no leaf nodes

    self.generic_counter_interface_templates = PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates()
    self.generic_counter_interface_templates.parent = self
    self._children_name_map["generic_counter_interface_templates"] = "generic-counter-interface-templates"

    # Data-tree path segments for this node.
    self._segment_path = lambda: "generic-counter-interface"
    self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/threshold/%s" % self._segment_path()
    # Freeze last so __setattr__ rejects unknown attributes from here on.
    self._is_frozen = True

def __setattr__(self, name, value):
    # Route attribute writes through the framework setter for validation.
    self._perform_setattr(PerfMgmt.Threshold.GenericCounterInterface, [], name, value)
class GenericCounterInterfaceTemplates(_Entity_):
"""
Interface Generic Counter threshold templates
.. attribute:: generic_counter_interface_template
Interface Generic Counter threshold template instance
**type**\: list of :py:class:`GenericCounterInterfaceTemplate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
    # Py2/Py3-compatible super() call (generated code supports both).
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates, self).__init__()

    # YANG node identity and position within the model tree.
    self.yang_name = "generic-counter-interface-templates"
    self.yang_parent_name = "generic-counter-interface"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # Map of YANG child node name -> (python attribute name, child class).
    self._child_classes = OrderedDict([("generic-counter-interface-template", ("generic_counter_interface_template", PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate))])
    self._leafs = OrderedDict()  # this container has no leaf nodes

    # YANG list of template entries, managed by the framework.
    self.generic_counter_interface_template = YList(self)

    # Data-tree path segments for this node.
    self._segment_path = lambda: "generic-counter-interface-templates"
    self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/threshold/generic-counter-interface/%s" % self._segment_path()
    # Freeze last so __setattr__ rejects unknown attributes from here on.
    self._is_frozen = True

def __setattr__(self, name, value):
    # Route attribute writes through the framework setter for validation.
    self._perform_setattr(PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates, [], name, value)
class GenericCounterInterfaceTemplate(_Entity_):
"""
Interface Generic Counter threshold template
instance
.. attribute:: template_name (key)
Template Name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: in_octets
Number of inbound octets/bytes
**type**\: :py:class:`InOctets <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InOctets>`
**presence node**\: True
.. attribute:: in_ucast_pkts
Number of inbound unicast packets
**type**\: :py:class:`InUcastPkts <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InUcastPkts>`
**presence node**\: True
.. attribute:: out_ucast_pkts
Number of outbound unicast packets
**type**\: :py:class:`OutUcastPkts <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutUcastPkts>`
**presence node**\: True
.. attribute:: out_broadcast_pkts
Number of outbound broadcast packets
**type**\: :py:class:`OutBroadcastPkts <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutBroadcastPkts>`
**presence node**\: True
.. attribute:: out_multicast_pkts
Number of outbound multicast packets
**type**\: :py:class:`OutMulticastPkts <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutMulticastPkts>`
**presence node**\: True
.. attribute:: input_overrun
Number of inbound packets with overrun errors
**type**\: :py:class:`InputOverrun <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputOverrun>`
**presence node**\: True
.. attribute:: out_octets
Number of outbound octets/bytes
**type**\: :py:class:`OutOctets <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutOctets>`
**presence node**\: True
.. attribute:: output_underrun
Number of outbound packets with underrun errors
**type**\: :py:class:`OutputUnderrun <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutputUnderrun>`
**presence node**\: True
.. attribute:: input_total_errors
Number of inbound incorrect packets discarded
**type**\: :py:class:`InputTotalErrors <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputTotalErrors>`
**presence node**\: True
.. attribute:: output_total_drops
Number of outbound correct packets discarded
**type**\: :py:class:`OutputTotalDrops <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutputTotalDrops>`
**presence node**\: True
.. attribute:: input_crc
Number of inbound packets discarded with incorrect CRC
**type**\: :py:class:`InputCrc <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputCrc>`
**presence node**\: True
.. attribute:: in_broadcast_pkts
Number of inbound broadcast packets
**type**\: :py:class:`InBroadcastPkts <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InBroadcastPkts>`
**presence node**\: True
.. attribute:: in_multicast_pkts
Number of inbound multicast packets
**type**\: :py:class:`InMulticastPkts <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InMulticastPkts>`
**presence node**\: True
.. attribute:: out_packets
Number of outbound packets
**type**\: :py:class:`OutPackets <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutPackets>`
**presence node**\: True
.. attribute:: output_total_errors
Number of outbound incorrect packets discarded
**type**\: :py:class:`OutputTotalErrors <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutputTotalErrors>`
**presence node**\: True
.. attribute:: in_packets
Number of inbound packets
**type**\: :py:class:`InPackets <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InPackets>`
**presence node**\: True
.. attribute:: input_unknown_proto
Number of inbound packets discarded with unknown protocol
**type**\: :py:class:`InputUnknownProto <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputUnknownProto>`
**presence node**\: True
.. attribute:: input_queue_drops
Number of input queue drops
**type**\: :py:class:`InputQueueDrops <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputQueueDrops>`
**presence node**\: True
.. attribute:: input_total_drops
Number of inbound correct packets discarded
**type**\: :py:class:`InputTotalDrops <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputTotalDrops>`
**presence node**\: True
.. attribute:: input_frame
Number of inbound packets with framing errors
**type**\: :py:class:`InputFrame <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputFrame>`
**presence node**\: True
.. attribute:: sample_interval
Frequency of sampling in minutes
**type**\: int
**range:** 1..60
**units**\: minute
.. attribute:: reg_exp_group
Enable instance filtering by regular expression
**type**\: str
**length:** 1..32
.. attribute:: vrf_group
Enable instance filtering by VRF name regular expression
**type**\: str
**length:** 1..32
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
    # Py2/Py3-compatible super() call (generated code supports both).
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate, self).__init__()

    # YANG node identity; this is a keyed list entry (key: template_name).
    self.yang_name = "generic-counter-interface-template"
    self.yang_parent_name = "generic-counter-interface-templates"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = ['template_name']
    # Map of YANG child node name -> (python attribute name, child class);
    # one entry per per-counter presence container of this template.
    self._child_classes = OrderedDict([("in-octets", ("in_octets", PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InOctets)), ("in-ucast-pkts", ("in_ucast_pkts", PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InUcastPkts)), ("out-ucast-pkts", ("out_ucast_pkts", PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutUcastPkts)), ("out-broadcast-pkts", ("out_broadcast_pkts", PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutBroadcastPkts)), ("out-multicast-pkts", ("out_multicast_pkts", PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutMulticastPkts)), ("input-overrun", ("input_overrun", PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputOverrun)), ("out-octets", ("out_octets", PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutOctets)), ("output-underrun", ("output_underrun", PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutputUnderrun)), ("input-total-errors", ("input_total_errors", PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputTotalErrors)), ("output-total-drops", ("output_total_drops", PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutputTotalDrops)), ("input-crc", ("input_crc", PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputCrc)), ("in-broadcast-pkts", ("in_broadcast_pkts", PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InBroadcastPkts)), 
    ("in-multicast-pkts", ("in_multicast_pkts", PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InMulticastPkts)), ("out-packets", ("out_packets", PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutPackets)), ("output-total-errors", ("output_total_errors", PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutputTotalErrors)), ("in-packets", ("in_packets", PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InPackets)), ("input-unknown-proto", ("input_unknown_proto", PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputUnknownProto)), ("input-queue-drops", ("input_queue_drops", PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputQueueDrops)), ("input-total-drops", ("input_total_drops", PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputTotalDrops)), ("input-frame", ("input_frame", PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputFrame))])
    # Leaf registry: python name -> (YLeaf descriptor, accepted python types).
    self._leafs = OrderedDict([
        ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
        ('sample_interval', (YLeaf(YType.uint32, 'sample-interval'), ['int'])),
        ('reg_exp_group', (YLeaf(YType.str, 'reg-exp-group'), ['str'])),
        ('vrf_group', (YLeaf(YType.str, 'vrf-group'), ['str'])),
    ])
    self.template_name = None
    self.sample_interval = None
    self.reg_exp_group = None
    self.vrf_group = None

    # Presence containers start as None (absent) and are only serialized
    # once the user assigns an instance; register yang-name mappings now.
    self.in_octets = None
    self._children_name_map["in_octets"] = "in-octets"
    self.in_ucast_pkts = None
    self._children_name_map["in_ucast_pkts"] = "in-ucast-pkts"
    self.out_ucast_pkts = None
    self._children_name_map["out_ucast_pkts"] = "out-ucast-pkts"
    self.out_broadcast_pkts = None
    self._children_name_map["out_broadcast_pkts"] = "out-broadcast-pkts"
    self.out_multicast_pkts = None
    self._children_name_map["out_multicast_pkts"] = "out-multicast-pkts"
    self.input_overrun = None
    self._children_name_map["input_overrun"] = "input-overrun"
    self.out_octets = None
    self._children_name_map["out_octets"] = "out-octets"
    self.output_underrun = None
    self._children_name_map["output_underrun"] = "output-underrun"
    self.input_total_errors = None
    self._children_name_map["input_total_errors"] = "input-total-errors"
    self.output_total_drops = None
    self._children_name_map["output_total_drops"] = "output-total-drops"
    self.input_crc = None
    self._children_name_map["input_crc"] = "input-crc"
    self.in_broadcast_pkts = None
    self._children_name_map["in_broadcast_pkts"] = "in-broadcast-pkts"
    self.in_multicast_pkts = None
    self._children_name_map["in_multicast_pkts"] = "in-multicast-pkts"
    self.out_packets = None
    self._children_name_map["out_packets"] = "out-packets"
    self.output_total_errors = None
    self._children_name_map["output_total_errors"] = "output-total-errors"
    self.in_packets = None
    self._children_name_map["in_packets"] = "in-packets"
    self.input_unknown_proto = None
    self._children_name_map["input_unknown_proto"] = "input-unknown-proto"
    self.input_queue_drops = None
    self._children_name_map["input_queue_drops"] = "input-queue-drops"
    self.input_total_drops = None
    self._children_name_map["input_total_drops"] = "input-total-drops"
    self.input_frame = None
    self._children_name_map["input_frame"] = "input-frame"

    # Keyed path segment: embeds the list key value.
    self._segment_path = lambda: "generic-counter-interface-template" + "[template-name='" + str(self.template_name) + "']"
    self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/threshold/generic-counter-interface/generic-counter-interface-templates/%s" % self._segment_path()
    # Freeze last so __setattr__ rejects unknown attributes from here on.
    self._is_frozen = True

def __setattr__(self, name, value):
    # Route attribute writes through the framework setter for validation.
    self._perform_setattr(PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate, ['template_name', 'sample_interval', 'reg_exp_group', 'vrf_group'], name, value)
class InOctets(_Entity_):
    """
    Number of inbound octets/bytes

    .. attribute:: operator

        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`

    .. attribute:: value

        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295

    .. attribute:: end_range_value

        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295

    .. attribute:: percent

        Set to TRUE if Specified threshold values are in percent
        **type**\: bool

    .. attribute:: rearm_type

        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`

    .. attribute:: rearm_window

        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Py2/Py3-compatible super() call (generated code supports both).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InOctets, self).__init__()

        # YANG node identity and position within the model tree.
        self.yang_name = "in-octets"
        self.yang_parent_name = "generic-counter-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: its existence alone carries meaning in YANG.
        self.is_presence_container = True
        # Leaf registry: python name -> (YLeaf descriptor, accepted python
        # types); enumeration leafs reference the generated enum classes.
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "in-octets"
        # Freeze last so __setattr__ rejects unknown attributes from here on.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route attribute writes through the framework setter for validation.
        self._perform_setattr(PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InOctets, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Import the generated meta table on demand and return this
        # class's meta-information entry.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InOctets']['meta_info']
class InUcastPkts(_Entity_):
    """
    Number of inbound unicast packets

    .. attribute:: operator

        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`

    .. attribute:: value

        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295

    .. attribute:: end_range_value

        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295

    .. attribute:: percent

        Set to TRUE if Specified threshold values are in percent
        **type**\: bool

    .. attribute:: rearm_type

        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`

    .. attribute:: rearm_window

        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Py2/Py3-compatible super() call (generated code supports both).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InUcastPkts, self).__init__()

        # YANG node identity and position within the model tree.
        self.yang_name = "in-ucast-pkts"
        self.yang_parent_name = "generic-counter-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: its existence alone carries meaning in YANG.
        self.is_presence_container = True
        # Leaf registry: python name -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "in-ucast-pkts"
        # Freeze last so __setattr__ rejects unknown attributes from here on.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route attribute writes through the framework setter for validation.
        self._perform_setattr(PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InUcastPkts, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Import the generated meta table on demand and return this
        # class's meta-information entry.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InUcastPkts']['meta_info']
class OutUcastPkts(_Entity_):
    """
    Number of outbound unicast packets

    .. attribute:: operator

        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`

    .. attribute:: value

        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295

    .. attribute:: end_range_value

        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295

    .. attribute:: percent

        Set to TRUE if Specified threshold values are in percent
        **type**\: bool

    .. attribute:: rearm_type

        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`

    .. attribute:: rearm_window

        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Py2/Py3-compatible super() call (generated code supports both).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutUcastPkts, self).__init__()

        # YANG node identity and position within the model tree.
        self.yang_name = "out-ucast-pkts"
        self.yang_parent_name = "generic-counter-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: its existence alone carries meaning in YANG.
        self.is_presence_container = True
        # Leaf registry: python name -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "out-ucast-pkts"
        # Freeze last so __setattr__ rejects unknown attributes from here on.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route attribute writes through the framework setter for validation.
        self._perform_setattr(PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutUcastPkts, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Import the generated meta table on demand and return this
        # class's meta-information entry.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutUcastPkts']['meta_info']
class OutBroadcastPkts(_Entity_):
    """
    Number of outbound broadcast packets.

    Presence container holding one threshold for this counter:
    ``operator`` (PmThresholdOp enum), ``value`` and ``end_range_value``
    (int, 0..4294967295; start/end of the range for operator RG, with
    ``end_range_value`` set to 0 otherwise), ``percent`` (bool, True when
    the threshold values are percentages), ``rearm_type`` (PmThresholdRearm
    enum) and ``rearm_window`` (int, 1..100, used for rearm type Window).

    This class is a :ref:`presence class<presence-class>`
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutBroadcastPkts, self).__init__()

        # Static YANG metadata for this presence container.
        self.yang_name = "out-broadcast-pkts"
        self.yang_parent_name = "generic-counter-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self.is_presence_container = True

        # Leaf table: python attribute -> (YLeaf descriptor, accepted value types).
        cfg_mod = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(cfg_mod, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(cfg_mod, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])

        # Every leaf starts out unset.
        for leaf_name in self._leafs:
            setattr(self, leaf_name, None)

        self._segment_path = lambda: "out-broadcast-pkts"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route every attribute write through YDK's validating setter.
        self._perform_setattr(
            PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutBroadcastPkts,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name,
            value)

    @staticmethod
    def _meta_info():
        """Return this class's entry from the generated meta table."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutBroadcastPkts']['meta_info']
class OutMulticastPkts(_Entity_):
    """
    Number of outbound multicast packets.

    Presence container holding one threshold for this counter:
    ``operator`` (PmThresholdOp enum), ``value`` and ``end_range_value``
    (int, 0..4294967295; start/end of the range for operator RG, with
    ``end_range_value`` set to 0 otherwise), ``percent`` (bool, True when
    the threshold values are percentages), ``rearm_type`` (PmThresholdRearm
    enum) and ``rearm_window`` (int, 1..100, used for rearm type Window).

    This class is a :ref:`presence class<presence-class>`
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutMulticastPkts, self).__init__()

        # Static YANG metadata for this presence container.
        self.yang_name = "out-multicast-pkts"
        self.yang_parent_name = "generic-counter-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self.is_presence_container = True

        # Leaf table: python attribute -> (YLeaf descriptor, accepted value types).
        cfg_mod = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(cfg_mod, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(cfg_mod, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])

        # Every leaf starts out unset.
        for leaf_name in self._leafs:
            setattr(self, leaf_name, None)

        self._segment_path = lambda: "out-multicast-pkts"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route every attribute write through YDK's validating setter.
        self._perform_setattr(
            PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutMulticastPkts,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name,
            value)

    @staticmethod
    def _meta_info():
        """Return this class's entry from the generated meta table."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutMulticastPkts']['meta_info']
class InputOverrun(_Entity_):
    """
    Number of inbound packets with overrun errors.

    Presence container holding one threshold for this counter:
    ``operator`` (PmThresholdOp enum), ``value`` and ``end_range_value``
    (int, 0..4294967295; start/end of the range for operator RG, with
    ``end_range_value`` set to 0 otherwise), ``percent`` (bool, True when
    the threshold values are percentages), ``rearm_type`` (PmThresholdRearm
    enum) and ``rearm_window`` (int, 1..100, used for rearm type Window).

    This class is a :ref:`presence class<presence-class>`
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputOverrun, self).__init__()

        # Static YANG metadata for this presence container.
        self.yang_name = "input-overrun"
        self.yang_parent_name = "generic-counter-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self.is_presence_container = True

        # Leaf table: python attribute -> (YLeaf descriptor, accepted value types).
        cfg_mod = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(cfg_mod, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(cfg_mod, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])

        # Every leaf starts out unset.
        for leaf_name in self._leafs:
            setattr(self, leaf_name, None)

        self._segment_path = lambda: "input-overrun"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route every attribute write through YDK's validating setter.
        self._perform_setattr(
            PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputOverrun,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name,
            value)

    @staticmethod
    def _meta_info():
        """Return this class's entry from the generated meta table."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputOverrun']['meta_info']
class OutOctets(_Entity_):
    """
    Number of outbound octets/bytes.

    Presence container holding one threshold for this counter:
    ``operator`` (PmThresholdOp enum), ``value`` and ``end_range_value``
    (int, 0..4294967295; start/end of the range for operator RG, with
    ``end_range_value`` set to 0 otherwise), ``percent`` (bool, True when
    the threshold values are percentages), ``rearm_type`` (PmThresholdRearm
    enum) and ``rearm_window`` (int, 1..100, used for rearm type Window).

    This class is a :ref:`presence class<presence-class>`
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutOctets, self).__init__()

        # Static YANG metadata for this presence container.
        self.yang_name = "out-octets"
        self.yang_parent_name = "generic-counter-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self.is_presence_container = True

        # Leaf table: python attribute -> (YLeaf descriptor, accepted value types).
        cfg_mod = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(cfg_mod, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(cfg_mod, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])

        # Every leaf starts out unset.
        for leaf_name in self._leafs:
            setattr(self, leaf_name, None)

        self._segment_path = lambda: "out-octets"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route every attribute write through YDK's validating setter.
        self._perform_setattr(
            PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutOctets,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name,
            value)

    @staticmethod
    def _meta_info():
        """Return this class's entry from the generated meta table."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutOctets']['meta_info']
class OutputUnderrun(_Entity_):
    """
    Number of outbound packets with underrun errors.

    Presence container holding one threshold for this counter:
    ``operator`` (PmThresholdOp enum), ``value`` and ``end_range_value``
    (int, 0..4294967295; start/end of the range for operator RG, with
    ``end_range_value`` set to 0 otherwise), ``percent`` (bool, True when
    the threshold values are percentages), ``rearm_type`` (PmThresholdRearm
    enum) and ``rearm_window`` (int, 1..100, used for rearm type Window).

    This class is a :ref:`presence class<presence-class>`
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutputUnderrun, self).__init__()

        # Static YANG metadata for this presence container.
        self.yang_name = "output-underrun"
        self.yang_parent_name = "generic-counter-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self.is_presence_container = True

        # Leaf table: python attribute -> (YLeaf descriptor, accepted value types).
        cfg_mod = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(cfg_mod, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(cfg_mod, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])

        # Every leaf starts out unset.
        for leaf_name in self._leafs:
            setattr(self, leaf_name, None)

        self._segment_path = lambda: "output-underrun"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route every attribute write through YDK's validating setter.
        self._perform_setattr(
            PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutputUnderrun,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name,
            value)

    @staticmethod
    def _meta_info():
        """Return this class's entry from the generated meta table."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutputUnderrun']['meta_info']
class InputTotalErrors(_Entity_):
    """
    Number of inbound incorrect packets discarded.

    Presence container holding one threshold for this counter:
    ``operator`` (PmThresholdOp enum), ``value`` and ``end_range_value``
    (int, 0..4294967295; start/end of the range for operator RG, with
    ``end_range_value`` set to 0 otherwise), ``percent`` (bool, True when
    the threshold values are percentages), ``rearm_type`` (PmThresholdRearm
    enum) and ``rearm_window`` (int, 1..100, used for rearm type Window).

    This class is a :ref:`presence class<presence-class>`
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputTotalErrors, self).__init__()

        # Static YANG metadata for this presence container.
        self.yang_name = "input-total-errors"
        self.yang_parent_name = "generic-counter-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self.is_presence_container = True

        # Leaf table: python attribute -> (YLeaf descriptor, accepted value types).
        cfg_mod = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(cfg_mod, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(cfg_mod, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])

        # Every leaf starts out unset.
        for leaf_name in self._leafs:
            setattr(self, leaf_name, None)

        self._segment_path = lambda: "input-total-errors"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route every attribute write through YDK's validating setter.
        self._perform_setattr(
            PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputTotalErrors,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name,
            value)

    @staticmethod
    def _meta_info():
        """Return this class's entry from the generated meta table."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputTotalErrors']['meta_info']
class OutputTotalDrops(_Entity_):
    """
    Number of outbound correct packets discarded.

    Presence container holding one threshold for this counter:
    ``operator`` (PmThresholdOp enum), ``value`` and ``end_range_value``
    (int, 0..4294967295; start/end of the range for operator RG, with
    ``end_range_value`` set to 0 otherwise), ``percent`` (bool, True when
    the threshold values are percentages), ``rearm_type`` (PmThresholdRearm
    enum) and ``rearm_window`` (int, 1..100, used for rearm type Window).

    This class is a :ref:`presence class<presence-class>`
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutputTotalDrops, self).__init__()

        # Static YANG metadata for this presence container.
        self.yang_name = "output-total-drops"
        self.yang_parent_name = "generic-counter-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self.is_presence_container = True

        # Leaf table: python attribute -> (YLeaf descriptor, accepted value types).
        cfg_mod = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(cfg_mod, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(cfg_mod, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])

        # Every leaf starts out unset.
        for leaf_name in self._leafs:
            setattr(self, leaf_name, None)

        self._segment_path = lambda: "output-total-drops"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route every attribute write through YDK's validating setter.
        self._perform_setattr(
            PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutputTotalDrops,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name,
            value)

    @staticmethod
    def _meta_info():
        """Return this class's entry from the generated meta table."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutputTotalDrops']['meta_info']
class InputCrc(_Entity_):
    """
    Number of inbound packets discarded with incorrect CRC.

    Presence container holding one threshold for this counter:
    ``operator`` (PmThresholdOp enum), ``value`` and ``end_range_value``
    (int, 0..4294967295; start/end of the range for operator RG, with
    ``end_range_value`` set to 0 otherwise), ``percent`` (bool, True when
    the threshold values are percentages), ``rearm_type`` (PmThresholdRearm
    enum) and ``rearm_window`` (int, 1..100, used for rearm type Window).

    This class is a :ref:`presence class<presence-class>`
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputCrc, self).__init__()

        # Static YANG metadata for this presence container.
        self.yang_name = "input-crc"
        self.yang_parent_name = "generic-counter-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self.is_presence_container = True

        # Leaf table: python attribute -> (YLeaf descriptor, accepted value types).
        cfg_mod = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(cfg_mod, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(cfg_mod, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])

        # Every leaf starts out unset.
        for leaf_name in self._leafs:
            setattr(self, leaf_name, None)

        self._segment_path = lambda: "input-crc"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route every attribute write through YDK's validating setter.
        self._perform_setattr(
            PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputCrc,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name,
            value)

    @staticmethod
    def _meta_info():
        """Return this class's entry from the generated meta table."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputCrc']['meta_info']
class InBroadcastPkts(_Entity_):
    """
    Number of inbound broadcast packets.

    Presence container holding one threshold for this counter:
    ``operator`` (PmThresholdOp enum), ``value`` and ``end_range_value``
    (int, 0..4294967295; start/end of the range for operator RG, with
    ``end_range_value`` set to 0 otherwise), ``percent`` (bool, True when
    the threshold values are percentages), ``rearm_type`` (PmThresholdRearm
    enum) and ``rearm_window`` (int, 1..100, used for rearm type Window).

    This class is a :ref:`presence class<presence-class>`
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InBroadcastPkts, self).__init__()

        # Static YANG metadata for this presence container.
        self.yang_name = "in-broadcast-pkts"
        self.yang_parent_name = "generic-counter-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self.is_presence_container = True

        # Leaf table: python attribute -> (YLeaf descriptor, accepted value types).
        cfg_mod = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(cfg_mod, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(cfg_mod, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])

        # Every leaf starts out unset.
        for leaf_name in self._leafs:
            setattr(self, leaf_name, None)

        self._segment_path = lambda: "in-broadcast-pkts"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route every attribute write through YDK's validating setter.
        self._perform_setattr(
            PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InBroadcastPkts,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name,
            value)

    @staticmethod
    def _meta_info():
        """Return this class's entry from the generated meta table."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InBroadcastPkts']['meta_info']
class InMulticastPkts(_Entity_):
    """
    Number of inbound multicast packets.

    Presence container holding one threshold for this counter:
    ``operator`` (PmThresholdOp enum), ``value`` and ``end_range_value``
    (int, 0..4294967295; start/end of the range for operator RG, with
    ``end_range_value`` set to 0 otherwise), ``percent`` (bool, True when
    the threshold values are percentages), ``rearm_type`` (PmThresholdRearm
    enum) and ``rearm_window`` (int, 1..100, used for rearm type Window).

    This class is a :ref:`presence class<presence-class>`
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InMulticastPkts, self).__init__()

        # Static YANG metadata for this presence container.
        self.yang_name = "in-multicast-pkts"
        self.yang_parent_name = "generic-counter-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self.is_presence_container = True

        # Leaf table: python attribute -> (YLeaf descriptor, accepted value types).
        cfg_mod = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(cfg_mod, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(cfg_mod, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])

        # Every leaf starts out unset.
        for leaf_name in self._leafs:
            setattr(self, leaf_name, None)

        self._segment_path = lambda: "in-multicast-pkts"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route every attribute write through YDK's validating setter.
        self._perform_setattr(
            PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InMulticastPkts,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name,
            value)

    @staticmethod
    def _meta_info():
        """Return this class's entry from the generated meta table."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InMulticastPkts']['meta_info']
class OutPackets(_Entity_):
    """
    Number of outbound packets.

    Presence container holding one threshold for this counter:
    ``operator`` (PmThresholdOp enum), ``value`` and ``end_range_value``
    (int, 0..4294967295; start/end of the range for operator RG, with
    ``end_range_value`` set to 0 otherwise), ``percent`` (bool, True when
    the threshold values are percentages), ``rearm_type`` (PmThresholdRearm
    enum) and ``rearm_window`` (int, 1..100, used for rearm type Window).

    This class is a :ref:`presence class<presence-class>`
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutPackets, self).__init__()

        # Static YANG metadata for this presence container.
        self.yang_name = "out-packets"
        self.yang_parent_name = "generic-counter-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self.is_presence_container = True

        # Leaf table: python attribute -> (YLeaf descriptor, accepted value types).
        cfg_mod = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(cfg_mod, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(cfg_mod, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])

        # Every leaf starts out unset.
        for leaf_name in self._leafs:
            setattr(self, leaf_name, None)

        self._segment_path = lambda: "out-packets"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route every attribute write through YDK's validating setter.
        self._perform_setattr(
            PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutPackets,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name,
            value)

    @staticmethod
    def _meta_info():
        """Return this class's entry from the generated meta table."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutPackets']['meta_info']
class OutputTotalErrors(_Entity_):
    """
    Number of outbound incorrect packets discarded.

    Presence container holding one threshold for this counter:
    ``operator`` (PmThresholdOp enum), ``value`` and ``end_range_value``
    (int, 0..4294967295; start/end of the range for operator RG, with
    ``end_range_value`` set to 0 otherwise), ``percent`` (bool, True when
    the threshold values are percentages), ``rearm_type`` (PmThresholdRearm
    enum) and ``rearm_window`` (int, 1..100, used for rearm type Window).

    This class is a :ref:`presence class<presence-class>`
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutputTotalErrors, self).__init__()

        # Static YANG metadata for this presence container.
        self.yang_name = "output-total-errors"
        self.yang_parent_name = "generic-counter-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self.is_presence_container = True

        # Leaf table: python attribute -> (YLeaf descriptor, accepted value types).
        cfg_mod = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(cfg_mod, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(cfg_mod, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])

        # Every leaf starts out unset.
        for leaf_name in self._leafs:
            setattr(self, leaf_name, None)

        self._segment_path = lambda: "output-total-errors"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route every attribute write through YDK's validating setter.
        self._perform_setattr(
            PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutputTotalErrors,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name,
            value)

    @staticmethod
    def _meta_info():
        """Return this class's entry from the generated meta table."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.OutputTotalErrors']['meta_info']
class InPackets(_Entity_):
    """
    Number of inbound packets
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    This class is a :ref:`presence class<presence-class>`
    """
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible call to the _Entity_ base initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InPackets, self).__init__()
        self.yang_name = "in-packets"
        self.yang_parent_name = "generic-counter-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only node: no child containers
        self.is_presence_container = True  # YANG presence container: exists only when explicitly created
        # YANG leaf metadata: python attribute name -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # Leaf values start unset; these assignments are routed through __setattr__.
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "in-packets"
        # Set last: presumably locks the attribute set via _perform_setattr — behavior lives in _Entity_.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all attribute writes through YDK validation against the declared leaf names.
        self._perform_setattr(PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InPackets, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Look up this class's generated metadata entry.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InPackets']['meta_info']
class InputUnknownProto(_Entity_):
    """
    Number of inbound packets discarded with
    unknown protocol
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    This class is a :ref:`presence class<presence-class>`
    """
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible call to the _Entity_ base initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputUnknownProto, self).__init__()
        self.yang_name = "input-unknown-proto"
        self.yang_parent_name = "generic-counter-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only node: no child containers
        self.is_presence_container = True  # YANG presence container: exists only when explicitly created
        # YANG leaf metadata: python attribute name -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # Leaf values start unset; these assignments are routed through __setattr__.
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "input-unknown-proto"
        # Set last: presumably locks the attribute set via _perform_setattr — behavior lives in _Entity_.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all attribute writes through YDK validation against the declared leaf names.
        self._perform_setattr(PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputUnknownProto, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Look up this class's generated metadata entry.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputUnknownProto']['meta_info']
class InputQueueDrops(_Entity_):
    """
    Number of input queue drops
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    This class is a :ref:`presence class<presence-class>`
    """
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible call to the _Entity_ base initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputQueueDrops, self).__init__()
        self.yang_name = "input-queue-drops"
        self.yang_parent_name = "generic-counter-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only node: no child containers
        self.is_presence_container = True  # YANG presence container: exists only when explicitly created
        # YANG leaf metadata: python attribute name -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # Leaf values start unset; these assignments are routed through __setattr__.
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "input-queue-drops"
        # Set last: presumably locks the attribute set via _perform_setattr — behavior lives in _Entity_.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all attribute writes through YDK validation against the declared leaf names.
        self._perform_setattr(PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputQueueDrops, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Look up this class's generated metadata entry.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputQueueDrops']['meta_info']
class InputTotalDrops(_Entity_):
    """
    Number of inbound correct packets discarded
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    This class is a :ref:`presence class<presence-class>`
    """
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible call to the _Entity_ base initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputTotalDrops, self).__init__()
        self.yang_name = "input-total-drops"
        self.yang_parent_name = "generic-counter-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only node: no child containers
        self.is_presence_container = True  # YANG presence container: exists only when explicitly created
        # YANG leaf metadata: python attribute name -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # Leaf values start unset; these assignments are routed through __setattr__.
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "input-total-drops"
        # Set last: presumably locks the attribute set via _perform_setattr — behavior lives in _Entity_.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all attribute writes through YDK validation against the declared leaf names.
        self._perform_setattr(PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputTotalDrops, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Look up this class's generated metadata entry.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputTotalDrops']['meta_info']
class InputFrame(_Entity_):
    """
    Number of inbound packets with framing
    errors
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    This class is a :ref:`presence class<presence-class>`
    """
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible call to the _Entity_ base initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputFrame, self).__init__()
        self.yang_name = "input-frame"
        self.yang_parent_name = "generic-counter-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only node: no child containers
        self.is_presence_container = True  # YANG presence container: exists only when explicitly created
        # YANG leaf metadata: python attribute name -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # Leaf values start unset; these assignments are routed through __setattr__.
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "input-frame"
        # Set last: presumably locks the attribute set via _perform_setattr — behavior lives in _Entity_.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all attribute writes through YDK validation against the declared leaf names.
        self._perform_setattr(PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputFrame, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Look up this class's generated metadata entry.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate.InputFrame']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated metadata entry for GenericCounterInterfaceTemplate."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as _meta_mod
    entry = _meta_mod._meta_table['PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates.GenericCounterInterfaceTemplate']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated metadata entry for GenericCounterInterfaceTemplates."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as _meta_mod
    entry = _meta_mod._meta_table['PerfMgmt.Threshold.GenericCounterInterface.GenericCounterInterfaceTemplates']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated metadata entry for GenericCounterInterface."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as _meta_mod
    entry = _meta_mod._meta_table['PerfMgmt.Threshold.GenericCounterInterface']
    return entry['meta_info']
class LdpMpls(_Entity_):
"""
MPLS LDP threshold configuration
.. attribute:: ldp_mpls_templates
MPLS LDP threshold templates
**type**\: :py:class:`LdpMplsTemplates <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
    # Python 2/3 compatible call to the _Entity_ base initializer.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PerfMgmt.Threshold.LdpMpls, self).__init__()
    self.yang_name = "ldp-mpls"
    self.yang_parent_name = "threshold"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # Single child container under this node: ldp-mpls-templates.
    self._child_classes = OrderedDict([("ldp-mpls-templates", ("ldp_mpls_templates", PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates))])
    self._leafs = OrderedDict()  # this container declares no leafs of its own
    # Eagerly instantiate the child container and wire the parent link.
    self.ldp_mpls_templates = PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates()
    self.ldp_mpls_templates.parent = self
    self._children_name_map["ldp_mpls_templates"] = "ldp-mpls-templates"
    self._segment_path = lambda: "ldp-mpls"
    self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/threshold/%s" % self._segment_path()
    # Set last: presumably locks the attribute set via _perform_setattr — behavior lives in _Entity_.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Route attribute writes through YDK validation; empty list — this container has no writable leafs.
    self._perform_setattr(PerfMgmt.Threshold.LdpMpls, [], name, value)
class LdpMplsTemplates(_Entity_):
"""
MPLS LDP threshold templates
.. attribute:: ldp_mpls_template
MPLS LDP threshold template instance
**type**\: list of :py:class:`LdpMplsTemplate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
    # Python 2/3 compatible call to the _Entity_ base initializer.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates, self).__init__()
    self.yang_name = "ldp-mpls-templates"
    self.yang_parent_name = "ldp-mpls"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # One child class: the keyed ldp-mpls-template list entries.
    self._child_classes = OrderedDict([("ldp-mpls-template", ("ldp_mpls_template", PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate))])
    self._leafs = OrderedDict()  # this container declares no leafs of its own
    self.ldp_mpls_template = YList(self)  # YANG list: zero or more LdpMplsTemplate entries
    self._segment_path = lambda: "ldp-mpls-templates"
    self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/threshold/ldp-mpls/%s" % self._segment_path()
    # Set last: presumably locks the attribute set via _perform_setattr — behavior lives in _Entity_.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Route attribute writes through YDK validation; empty list — this container has no writable leafs.
    self._perform_setattr(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates, [], name, value)
class LdpMplsTemplate(_Entity_):
"""
MPLS LDP threshold template instance
.. attribute:: template_name (key)
Template Name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: address_withdraw_msgs_rcvd
Number of Address Withdraw messages received
**type**\: :py:class:`AddressWithdrawMsgsRcvd <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.AddressWithdrawMsgsRcvd>`
**presence node**\: True
.. attribute:: label_withdraw_msgs_rcvd
Number of Label Withdraw messages received
**type**\: :py:class:`LabelWithdrawMsgsRcvd <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.LabelWithdrawMsgsRcvd>`
**presence node**\: True
.. attribute:: address_withdraw_msgs_sent
Number of Address Withdraw messages sent
**type**\: :py:class:`AddressWithdrawMsgsSent <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.AddressWithdrawMsgsSent>`
**presence node**\: True
.. attribute:: label_withdraw_msgs_sent
Number of Label Withdraw messages sent
**type**\: :py:class:`LabelWithdrawMsgsSent <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.LabelWithdrawMsgsSent>`
**presence node**\: True
.. attribute:: notification_msgs_rcvd
Number of Notification messages received
**type**\: :py:class:`NotificationMsgsRcvd <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.NotificationMsgsRcvd>`
**presence node**\: True
.. attribute:: total_msgs_rcvd
Total number of messages received
**type**\: :py:class:`TotalMsgsRcvd <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.TotalMsgsRcvd>`
**presence node**\: True
.. attribute:: notification_msgs_sent
Number of Notification messages sent
**type**\: :py:class:`NotificationMsgsSent <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.NotificationMsgsSent>`
**presence node**\: True
.. attribute:: total_msgs_sent
Total number of messages sent
**type**\: :py:class:`TotalMsgsSent <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.TotalMsgsSent>`
**presence node**\: True
.. attribute:: label_release_msgs_rcvd
Number of LAbel Release messages received
**type**\: :py:class:`LabelReleaseMsgsRcvd <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.LabelReleaseMsgsRcvd>`
**presence node**\: True
.. attribute:: init_msgs_rcvd
Number of Init messages received
**type**\: :py:class:`InitMsgsRcvd <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.InitMsgsRcvd>`
**presence node**\: True
.. attribute:: label_release_msgs_sent
Number of Label Release messages sent
**type**\: :py:class:`LabelReleaseMsgsSent <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.LabelReleaseMsgsSent>`
**presence node**\: True
.. attribute:: init_msgs_sent
Number of Init messages sent
**type**\: :py:class:`InitMsgsSent <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.InitMsgsSent>`
**presence node**\: True
.. attribute:: label_mapping_msgs_rcvd
Number of Label Mapping messages received
**type**\: :py:class:`LabelMappingMsgsRcvd <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.LabelMappingMsgsRcvd>`
**presence node**\: True
.. attribute:: keepalive_msgs_rcvd
Number of Keepalive messages received
**type**\: :py:class:`KeepaliveMsgsRcvd <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.KeepaliveMsgsRcvd>`
**presence node**\: True
.. attribute:: label_mapping_msgs_sent
Number of Label Mapping messages sent
**type**\: :py:class:`LabelMappingMsgsSent <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.LabelMappingMsgsSent>`
**presence node**\: True
.. attribute:: keepalive_msgs_sent
Number of Keepalive messages sent
**type**\: :py:class:`KeepaliveMsgsSent <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.KeepaliveMsgsSent>`
**presence node**\: True
.. attribute:: address_msgs_rcvd
Number of Address messages received
**type**\: :py:class:`AddressMsgsRcvd <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.AddressMsgsRcvd>`
**presence node**\: True
.. attribute:: address_msgs_sent
Number of Address messages sent
**type**\: :py:class:`AddressMsgsSent <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.AddressMsgsSent>`
**presence node**\: True
.. attribute:: sample_interval
Frequency of sampling in minutes
**type**\: int
**range:** 1..60
**units**\: minute
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
    # Python 2/3 compatible call to the _Entity_ base initializer.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate, self).__init__()
    self.yang_name = "ldp-mpls-template"
    self.yang_parent_name = "ldp-mpls-templates"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = ['template_name']  # YANG list key: template-name
    # Map of YANG child node name -> (python attribute name, presence-container class).
    self._child_classes = OrderedDict([("address-withdraw-msgs-rcvd", ("address_withdraw_msgs_rcvd", PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.AddressWithdrawMsgsRcvd)), ("label-withdraw-msgs-rcvd", ("label_withdraw_msgs_rcvd", PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.LabelWithdrawMsgsRcvd)), ("address-withdraw-msgs-sent", ("address_withdraw_msgs_sent", PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.AddressWithdrawMsgsSent)), ("label-withdraw-msgs-sent", ("label_withdraw_msgs_sent", PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.LabelWithdrawMsgsSent)), ("notification-msgs-rcvd", ("notification_msgs_rcvd", PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.NotificationMsgsRcvd)), ("total-msgs-rcvd", ("total_msgs_rcvd", PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.TotalMsgsRcvd)), ("notification-msgs-sent", ("notification_msgs_sent", PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.NotificationMsgsSent)), ("total-msgs-sent", ("total_msgs_sent", PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.TotalMsgsSent)), ("label-release-msgs-rcvd", ("label_release_msgs_rcvd", PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.LabelReleaseMsgsRcvd)), ("init-msgs-rcvd", ("init_msgs_rcvd", PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.InitMsgsRcvd)), ("label-release-msgs-sent", ("label_release_msgs_sent", PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.LabelReleaseMsgsSent)), ("init-msgs-sent", ("init_msgs_sent", PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.InitMsgsSent)), ("label-mapping-msgs-rcvd", ("label_mapping_msgs_rcvd", PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.LabelMappingMsgsRcvd)), ("keepalive-msgs-rcvd", ("keepalive_msgs_rcvd", PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.KeepaliveMsgsRcvd)), ("label-mapping-msgs-sent", ("label_mapping_msgs_sent", PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.LabelMappingMsgsSent)), ("keepalive-msgs-sent", ("keepalive_msgs_sent", PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.KeepaliveMsgsSent)), ("address-msgs-rcvd", ("address_msgs_rcvd", PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.AddressMsgsRcvd)), ("address-msgs-sent", ("address_msgs_sent", PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.AddressMsgsSent))])
    # YANG leaf metadata: python attribute name -> (YLeaf descriptor, accepted python types).
    self._leafs = OrderedDict([
        ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
        ('sample_interval', (YLeaf(YType.uint32, 'sample-interval'), ['int'])),
    ])
    self.template_name = None
    self.sample_interval = None
    # Presence containers start absent (None); each maps to its YANG node name.
    self.address_withdraw_msgs_rcvd = None
    self._children_name_map["address_withdraw_msgs_rcvd"] = "address-withdraw-msgs-rcvd"
    self.label_withdraw_msgs_rcvd = None
    self._children_name_map["label_withdraw_msgs_rcvd"] = "label-withdraw-msgs-rcvd"
    self.address_withdraw_msgs_sent = None
    self._children_name_map["address_withdraw_msgs_sent"] = "address-withdraw-msgs-sent"
    self.label_withdraw_msgs_sent = None
    self._children_name_map["label_withdraw_msgs_sent"] = "label-withdraw-msgs-sent"
    self.notification_msgs_rcvd = None
    self._children_name_map["notification_msgs_rcvd"] = "notification-msgs-rcvd"
    self.total_msgs_rcvd = None
    self._children_name_map["total_msgs_rcvd"] = "total-msgs-rcvd"
    self.notification_msgs_sent = None
    self._children_name_map["notification_msgs_sent"] = "notification-msgs-sent"
    self.total_msgs_sent = None
    self._children_name_map["total_msgs_sent"] = "total-msgs-sent"
    self.label_release_msgs_rcvd = None
    self._children_name_map["label_release_msgs_rcvd"] = "label-release-msgs-rcvd"
    self.init_msgs_rcvd = None
    self._children_name_map["init_msgs_rcvd"] = "init-msgs-rcvd"
    self.label_release_msgs_sent = None
    self._children_name_map["label_release_msgs_sent"] = "label-release-msgs-sent"
    self.init_msgs_sent = None
    self._children_name_map["init_msgs_sent"] = "init-msgs-sent"
    self.label_mapping_msgs_rcvd = None
    self._children_name_map["label_mapping_msgs_rcvd"] = "label-mapping-msgs-rcvd"
    self.keepalive_msgs_rcvd = None
    self._children_name_map["keepalive_msgs_rcvd"] = "keepalive-msgs-rcvd"
    self.label_mapping_msgs_sent = None
    self._children_name_map["label_mapping_msgs_sent"] = "label-mapping-msgs-sent"
    self.keepalive_msgs_sent = None
    self._children_name_map["keepalive_msgs_sent"] = "keepalive-msgs-sent"
    self.address_msgs_rcvd = None
    self._children_name_map["address_msgs_rcvd"] = "address-msgs-rcvd"
    self.address_msgs_sent = None
    self._children_name_map["address_msgs_sent"] = "address-msgs-sent"
    # Keyed list entry: segment path embeds the template-name key value.
    self._segment_path = lambda: "ldp-mpls-template" + "[template-name='" + str(self.template_name) + "']"
    self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/threshold/ldp-mpls/ldp-mpls-templates/%s" % self._segment_path()
    # Set last: presumably locks the attribute set via _perform_setattr — behavior lives in _Entity_.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Route attribute writes through YDK validation against this entry's two declared leafs.
    self._perform_setattr(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate, ['template_name', 'sample_interval'], name, value)
class AddressWithdrawMsgsRcvd(_Entity_):
    """
    Number of Address Withdraw messages received
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    This class is a :ref:`presence class<presence-class>`
    """
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible call to the _Entity_ base initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.AddressWithdrawMsgsRcvd, self).__init__()
        self.yang_name = "address-withdraw-msgs-rcvd"
        self.yang_parent_name = "ldp-mpls-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only node: no child containers
        self.is_presence_container = True  # YANG presence container: exists only when explicitly created
        # YANG leaf metadata: python attribute name -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # Leaf values start unset; these assignments are routed through __setattr__.
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "address-withdraw-msgs-rcvd"
        # Set last: presumably locks the attribute set via _perform_setattr — behavior lives in _Entity_.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all attribute writes through YDK validation against the declared leaf names.
        self._perform_setattr(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.AddressWithdrawMsgsRcvd, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Look up this class's generated metadata entry.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.AddressWithdrawMsgsRcvd']['meta_info']
class LabelWithdrawMsgsRcvd(_Entity_):
    """
    Number of Label Withdraw messages received

    .. attribute:: operator
        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if Specified threshold values are in percent
        **type**\: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`
    """

    # YANG module prefix/revision used by YDK for namespace resolution.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() call (Py2 needs the explicit class path).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.LabelWithdrawMsgsRcvd, self).__init__()

        self.yang_name = "label-withdraw-msgs-rcvd"
        self.yang_parent_name = "ldp-mpls-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: exists in the data tree only when explicitly configured.
        self.is_presence_container = True
        # Leaf descriptors: attribute name -> (YLeaf encoding, accepted Python types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        # XPath segment of this container relative to its parent list entry.
        self._segment_path = lambda: "label-withdraw-msgs-rcvd"
        # Must be set last: from here on __setattr__ validates every assignment.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all attribute writes through YDK validation of the known leaf names.
        self._perform_setattr(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.LabelWithdrawMsgsRcvd, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Import the generated metadata module locally (only needed here).
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.LabelWithdrawMsgsRcvd']['meta_info']
class AddressWithdrawMsgsSent(_Entity_):
    """
    Number of Address Withdraw messages sent

    .. attribute:: operator
        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if Specified threshold values are in percent
        **type**\: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`
    """

    # YANG module prefix/revision used by YDK for namespace resolution.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() call (Py2 needs the explicit class path).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.AddressWithdrawMsgsSent, self).__init__()

        self.yang_name = "address-withdraw-msgs-sent"
        self.yang_parent_name = "ldp-mpls-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: exists in the data tree only when explicitly configured.
        self.is_presence_container = True
        # Leaf descriptors: attribute name -> (YLeaf encoding, accepted Python types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        # XPath segment of this container relative to its parent list entry.
        self._segment_path = lambda: "address-withdraw-msgs-sent"
        # Must be set last: from here on __setattr__ validates every assignment.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all attribute writes through YDK validation of the known leaf names.
        self._perform_setattr(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.AddressWithdrawMsgsSent, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Import the generated metadata module locally (only needed here).
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.AddressWithdrawMsgsSent']['meta_info']
class LabelWithdrawMsgsSent(_Entity_):
    """
    Number of Label Withdraw messages sent

    .. attribute:: operator
        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if Specified threshold values are in percent
        **type**\: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`
    """

    # YANG module prefix/revision used by YDK for namespace resolution.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() call (Py2 needs the explicit class path).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.LabelWithdrawMsgsSent, self).__init__()

        self.yang_name = "label-withdraw-msgs-sent"
        self.yang_parent_name = "ldp-mpls-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: exists in the data tree only when explicitly configured.
        self.is_presence_container = True
        # Leaf descriptors: attribute name -> (YLeaf encoding, accepted Python types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        # XPath segment of this container relative to its parent list entry.
        self._segment_path = lambda: "label-withdraw-msgs-sent"
        # Must be set last: from here on __setattr__ validates every assignment.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all attribute writes through YDK validation of the known leaf names.
        self._perform_setattr(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.LabelWithdrawMsgsSent, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Import the generated metadata module locally (only needed here).
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.LabelWithdrawMsgsSent']['meta_info']
class NotificationMsgsRcvd(_Entity_):
    """
    Number of Notification messages received

    .. attribute:: operator
        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if Specified threshold values are in percent
        **type**\: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`
    """

    # YANG module prefix/revision used by YDK for namespace resolution.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() call (Py2 needs the explicit class path).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.NotificationMsgsRcvd, self).__init__()

        self.yang_name = "notification-msgs-rcvd"
        self.yang_parent_name = "ldp-mpls-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: exists in the data tree only when explicitly configured.
        self.is_presence_container = True
        # Leaf descriptors: attribute name -> (YLeaf encoding, accepted Python types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        # XPath segment of this container relative to its parent list entry.
        self._segment_path = lambda: "notification-msgs-rcvd"
        # Must be set last: from here on __setattr__ validates every assignment.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all attribute writes through YDK validation of the known leaf names.
        self._perform_setattr(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.NotificationMsgsRcvd, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Import the generated metadata module locally (only needed here).
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.NotificationMsgsRcvd']['meta_info']
class TotalMsgsRcvd(_Entity_):
    """
    Total number of messages received

    .. attribute:: operator
        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if Specified threshold values are in percent
        **type**\: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`
    """
    # Fix: the documented range for value/end_range_value previously read
    # 0..65536, contradicting the YType.uint32 leaf encoding below and every
    # sibling message-counter class; corrected to 0..4294967295.

    # YANG module prefix/revision used by YDK for namespace resolution.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() call (Py2 needs the explicit class path).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.TotalMsgsRcvd, self).__init__()

        self.yang_name = "total-msgs-rcvd"
        self.yang_parent_name = "ldp-mpls-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: exists in the data tree only when explicitly configured.
        self.is_presence_container = True
        # Leaf descriptors: attribute name -> (YLeaf encoding, accepted Python types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        # XPath segment of this container relative to its parent list entry.
        self._segment_path = lambda: "total-msgs-rcvd"
        # Must be set last: from here on __setattr__ validates every assignment.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all attribute writes through YDK validation of the known leaf names.
        self._perform_setattr(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.TotalMsgsRcvd, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Import the generated metadata module locally (only needed here).
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.TotalMsgsRcvd']['meta_info']
class NotificationMsgsSent(_Entity_):
    """
    Number of Notification messages sent

    .. attribute:: operator
        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if Specified threshold values are in percent
        **type**\: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`
    """

    # YANG module prefix/revision used by YDK for namespace resolution.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() call (Py2 needs the explicit class path).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.NotificationMsgsSent, self).__init__()

        self.yang_name = "notification-msgs-sent"
        self.yang_parent_name = "ldp-mpls-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: exists in the data tree only when explicitly configured.
        self.is_presence_container = True
        # Leaf descriptors: attribute name -> (YLeaf encoding, accepted Python types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        # XPath segment of this container relative to its parent list entry.
        self._segment_path = lambda: "notification-msgs-sent"
        # Must be set last: from here on __setattr__ validates every assignment.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all attribute writes through YDK validation of the known leaf names.
        self._perform_setattr(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.NotificationMsgsSent, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Import the generated metadata module locally (only needed here).
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.NotificationMsgsSent']['meta_info']
class TotalMsgsSent(_Entity_):
    """
    Total number of messages sent

    .. attribute:: operator
        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if Specified threshold values are in percent
        **type**\: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`
    """

    # YANG module prefix/revision used by YDK for namespace resolution.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() call (Py2 needs the explicit class path).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.TotalMsgsSent, self).__init__()

        self.yang_name = "total-msgs-sent"
        self.yang_parent_name = "ldp-mpls-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: exists in the data tree only when explicitly configured.
        self.is_presence_container = True
        # Leaf descriptors: attribute name -> (YLeaf encoding, accepted Python types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        # XPath segment of this container relative to its parent list entry.
        self._segment_path = lambda: "total-msgs-sent"
        # Must be set last: from here on __setattr__ validates every assignment.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all attribute writes through YDK validation of the known leaf names.
        self._perform_setattr(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.TotalMsgsSent, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Import the generated metadata module locally (only needed here).
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.TotalMsgsSent']['meta_info']
class LabelReleaseMsgsRcvd(_Entity_):
    """
    Number of Label Release messages received

    .. attribute:: operator
        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if Specified threshold values are in percent
        **type**\: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`
    """

    # YANG module prefix/revision used by YDK for namespace resolution.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() call (Py2 needs the explicit class path).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.LabelReleaseMsgsRcvd, self).__init__()

        self.yang_name = "label-release-msgs-rcvd"
        self.yang_parent_name = "ldp-mpls-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: exists in the data tree only when explicitly configured.
        self.is_presence_container = True
        # Leaf descriptors: attribute name -> (YLeaf encoding, accepted Python types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        # XPath segment of this container relative to its parent list entry.
        self._segment_path = lambda: "label-release-msgs-rcvd"
        # Must be set last: from here on __setattr__ validates every assignment.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all attribute writes through YDK validation of the known leaf names.
        self._perform_setattr(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.LabelReleaseMsgsRcvd, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Import the generated metadata module locally (only needed here).
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.LabelReleaseMsgsRcvd']['meta_info']
class InitMsgsRcvd(_Entity_):
    """
    Number of Init messages received

    .. attribute:: operator
        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if Specified threshold values are in percent
        **type**\: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`
    """

    # YANG module prefix/revision used by YDK for namespace resolution.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() call (Py2 needs the explicit class path).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.InitMsgsRcvd, self).__init__()

        self.yang_name = "init-msgs-rcvd"
        self.yang_parent_name = "ldp-mpls-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: exists in the data tree only when explicitly configured.
        self.is_presence_container = True
        # Leaf descriptors: attribute name -> (YLeaf encoding, accepted Python types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        # XPath segment of this container relative to its parent list entry.
        self._segment_path = lambda: "init-msgs-rcvd"
        # Must be set last: from here on __setattr__ validates every assignment.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all attribute writes through YDK validation of the known leaf names.
        self._perform_setattr(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.InitMsgsRcvd, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Import the generated metadata module locally (only needed here).
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.InitMsgsRcvd']['meta_info']
class LabelReleaseMsgsSent(_Entity_):
    """
    Number of Label Release messages sent

    .. attribute:: operator
        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if Specified threshold values are in percent
        **type**\: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`
    """

    # YANG module prefix/revision used by YDK for namespace resolution.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() call (Py2 needs the explicit class path).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.LabelReleaseMsgsSent, self).__init__()

        self.yang_name = "label-release-msgs-sent"
        self.yang_parent_name = "ldp-mpls-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: exists in the data tree only when explicitly configured.
        self.is_presence_container = True
        # Leaf descriptors: attribute name -> (YLeaf encoding, accepted Python types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        # XPath segment of this container relative to its parent list entry.
        self._segment_path = lambda: "label-release-msgs-sent"
        # Must be set last: from here on __setattr__ validates every assignment.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all attribute writes through YDK validation of the known leaf names.
        self._perform_setattr(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.LabelReleaseMsgsSent, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Import the generated metadata module locally (only needed here).
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.LabelReleaseMsgsSent']['meta_info']
class InitMsgsSent(_Entity_):
    """
    Number of Init messages sent

    .. attribute:: operator
        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if Specified threshold values are in percent
        **type**\: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`
    """

    # YANG module prefix/revision used by YDK for namespace resolution.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() call (Py2 needs the explicit class path).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.InitMsgsSent, self).__init__()

        self.yang_name = "init-msgs-sent"
        self.yang_parent_name = "ldp-mpls-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: exists in the data tree only when explicitly configured.
        self.is_presence_container = True
        # Leaf descriptors: attribute name -> (YLeaf encoding, accepted Python types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        # XPath segment of this container relative to its parent list entry.
        self._segment_path = lambda: "init-msgs-sent"
        # Must be set last: from here on __setattr__ validates every assignment.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all attribute writes through YDK validation of the known leaf names.
        self._perform_setattr(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.InitMsgsSent, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Import the generated metadata module locally (only needed here).
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.InitMsgsSent']['meta_info']
class LabelMappingMsgsRcvd(_Entity_):
"""
Number of Label Mapping messages received
.. attribute:: operator
Operator
**type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
.. attribute:: value
Threshold value (or start range value for operator RG)
**type**\: int
**range:** 0..4294967295
.. attribute:: end_range_value
Threshold end range value (for operator RG, set to 0 otherwise)
**type**\: int
**range:** 0..4294967295
.. attribute:: percent
Set to TRUE if Specified threshold values are in percent
**type**\: bool
.. attribute:: rearm_type
Configure the Rearm type
**type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
.. attribute:: rearm_window
Configure the rearm window size (for rearm type Window)
**type**\: int
**range:** 1..100
This class is a :ref:`presence class<presence-class>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.LabelMappingMsgsRcvd, self).__init__()
self.yang_name = "label-mapping-msgs-rcvd"
self.yang_parent_name = "ldp-mpls-template"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self.is_presence_container = True
self._leafs = OrderedDict([
('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
('value', (YLeaf(YType.uint32, 'value'), ['int'])),
('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
])
self.operator = None
self.value = None
self.end_range_value = None
self.percent = None
self.rearm_type = None
self.rearm_window = None
self._segment_path = lambda: "label-mapping-msgs-rcvd"
self._is_frozen = True
def __setattr__(self, name, value):
    # Route every attribute write through the YDK base helper together with the
    # list of known leaf names (behavior defined in _Entity_._perform_setattr).
    self._perform_setattr(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.LabelMappingMsgsRcvd, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)
@staticmethod
def _meta_info():
    # Lazy import: only load the (large) generated meta module on demand.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
    return meta._meta_table['PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.LabelMappingMsgsRcvd']['meta_info']
class KeepaliveMsgsRcvd(_Entity_):
    """
    Number of Keepalive messages received.
    This class is a :ref:`presence class<presence-class>`.
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    """
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Zero-argument super() exists only on Python 3; keep the explicit
        # spelling as the Python 2 fallback.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.KeepaliveMsgsRcvd, self).__init__()
        self.yang_name = "keepalive-msgs-rcvd"
        self.yang_parent_name = "ldp-mpls-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()  # leaf-only presence container
        self.is_presence_container = True
        # Leaf metadata: python name -> (YLeaf descriptor, accepted value types).
        _mod = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(_mod, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(_mod, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # Every leaf starts out unset; setattr() routes through __setattr__
        # exactly like a direct assignment would.
        for _leaf_name in self._leafs:
            setattr(self, _leaf_name, None)
        self._segment_path = lambda: "keepalive-msgs-rcvd"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the YDK base helper with the known leaf names.
        self._perform_setattr(
            PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.KeepaliveMsgsRcvd,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name, value)

    @staticmethod
    def _meta_info():
        # Imported lazily so the generated meta module is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.KeepaliveMsgsRcvd']['meta_info']
class LabelMappingMsgsSent(_Entity_):
    """
    Number of Label Mapping messages sent.
    This class is a :ref:`presence class<presence-class>`.
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    """
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Zero-argument super() exists only on Python 3; keep the explicit
        # spelling as the Python 2 fallback.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.LabelMappingMsgsSent, self).__init__()
        self.yang_name = "label-mapping-msgs-sent"
        self.yang_parent_name = "ldp-mpls-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()  # leaf-only presence container
        self.is_presence_container = True
        # Leaf metadata: python name -> (YLeaf descriptor, accepted value types).
        _mod = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(_mod, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(_mod, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # Every leaf starts out unset; setattr() routes through __setattr__
        # exactly like a direct assignment would.
        for _leaf_name in self._leafs:
            setattr(self, _leaf_name, None)
        self._segment_path = lambda: "label-mapping-msgs-sent"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the YDK base helper with the known leaf names.
        self._perform_setattr(
            PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.LabelMappingMsgsSent,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name, value)

    @staticmethod
    def _meta_info():
        # Imported lazily so the generated meta module is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.LabelMappingMsgsSent']['meta_info']
class KeepaliveMsgsSent(_Entity_):
    """
    Number of Keepalive messages sent.
    This class is a :ref:`presence class<presence-class>`.
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    """
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Zero-argument super() exists only on Python 3; keep the explicit
        # spelling as the Python 2 fallback.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.KeepaliveMsgsSent, self).__init__()
        self.yang_name = "keepalive-msgs-sent"
        self.yang_parent_name = "ldp-mpls-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()  # leaf-only presence container
        self.is_presence_container = True
        # Leaf metadata: python name -> (YLeaf descriptor, accepted value types).
        _mod = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(_mod, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(_mod, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # Every leaf starts out unset; setattr() routes through __setattr__
        # exactly like a direct assignment would.
        for _leaf_name in self._leafs:
            setattr(self, _leaf_name, None)
        self._segment_path = lambda: "keepalive-msgs-sent"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the YDK base helper with the known leaf names.
        self._perform_setattr(
            PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.KeepaliveMsgsSent,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name, value)

    @staticmethod
    def _meta_info():
        # Imported lazily so the generated meta module is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.KeepaliveMsgsSent']['meta_info']
class AddressMsgsRcvd(_Entity_):
    """
    Number of Address messages received.
    This class is a :ref:`presence class<presence-class>`.
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    """
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Zero-argument super() exists only on Python 3; keep the explicit
        # spelling as the Python 2 fallback.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.AddressMsgsRcvd, self).__init__()
        self.yang_name = "address-msgs-rcvd"
        self.yang_parent_name = "ldp-mpls-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()  # leaf-only presence container
        self.is_presence_container = True
        # Leaf metadata: python name -> (YLeaf descriptor, accepted value types).
        _mod = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(_mod, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(_mod, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # Every leaf starts out unset; setattr() routes through __setattr__
        # exactly like a direct assignment would.
        for _leaf_name in self._leafs:
            setattr(self, _leaf_name, None)
        self._segment_path = lambda: "address-msgs-rcvd"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the YDK base helper with the known leaf names.
        self._perform_setattr(
            PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.AddressMsgsRcvd,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name, value)

    @staticmethod
    def _meta_info():
        # Imported lazily so the generated meta module is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.AddressMsgsRcvd']['meta_info']
class AddressMsgsSent(_Entity_):
    """
    Number of Address messages sent.
    This class is a :ref:`presence class<presence-class>`.
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    """
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Zero-argument super() exists only on Python 3; keep the explicit
        # spelling as the Python 2 fallback.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.AddressMsgsSent, self).__init__()
        self.yang_name = "address-msgs-sent"
        self.yang_parent_name = "ldp-mpls-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()  # leaf-only presence container
        self.is_presence_container = True
        # Leaf metadata: python name -> (YLeaf descriptor, accepted value types).
        _mod = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(_mod, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(_mod, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # Every leaf starts out unset; setattr() routes through __setattr__
        # exactly like a direct assignment would.
        for _leaf_name in self._leafs:
            setattr(self, _leaf_name, None)
        self._segment_path = lambda: "address-msgs-sent"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the YDK base helper with the known leaf names.
        self._perform_setattr(
            PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.AddressMsgsSent,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name, value)

    @staticmethod
    def _meta_info():
        # Imported lazily so the generated meta module is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate.AddressMsgsSent']['meta_info']
@staticmethod
def _meta_info():
    # Lazy import: only load the (large) generated meta module on demand.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
    return meta._meta_table['PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates.LdpMplsTemplate']['meta_info']
@staticmethod
def _meta_info():
    # Lazy import: only load the (large) generated meta module on demand.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
    return meta._meta_table['PerfMgmt.Threshold.LdpMpls.LdpMplsTemplates']['meta_info']
@staticmethod
def _meta_info():
    # Lazy import: only load the (large) generated meta module on demand.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
    return meta._meta_table['PerfMgmt.Threshold.LdpMpls']['meta_info']
class BasicCounterInterface(_Entity_):
"""
Interface Basic Counter threshold configuration
.. attribute:: basic_counter_interface_templates
Interface Basic Counter threshold templates
**type**\: :py:class:`BasicCounterInterfaceTemplates <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
    # Python 3 supports the zero-argument super(); Python 2 needs the explicit form.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PerfMgmt.Threshold.BasicCounterInterface, self).__init__()
    # YANG bookkeeping for this node and its position in the model tree.
    self.yang_name = "basic-counter-interface"
    self.yang_parent_name = "threshold"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # Single child container: the basic-counter-interface-templates wrapper.
    self._child_classes = OrderedDict([("basic-counter-interface-templates", ("basic_counter_interface_templates", PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates))])
    self._leafs = OrderedDict()  # this container has no leaves of its own
    # Non-presence child is instantiated eagerly and parented to this node.
    self.basic_counter_interface_templates = PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates()
    self.basic_counter_interface_templates.parent = self
    self._children_name_map["basic_counter_interface_templates"] = "basic-counter-interface-templates"
    self._segment_path = lambda: "basic-counter-interface"
    self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/threshold/%s" % self._segment_path()
    # Set last; NOTE(review) — presumably gates further writes via _perform_setattr.
    self._is_frozen = True
def __setattr__(self, name, value):
    # No leaves here, so the known-leaf list is empty; the YDK base helper
    # still mediates all attribute writes.
    self._perform_setattr(PerfMgmt.Threshold.BasicCounterInterface, [], name, value)
class BasicCounterInterfaceTemplates(_Entity_):
"""
Interface Basic Counter threshold templates
.. attribute:: basic_counter_interface_template
Interface Basic Counter threshold template instance
**type**\: list of :py:class:`BasicCounterInterfaceTemplate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
    # Python 3 supports the zero-argument super(); Python 2 needs the explicit form.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates, self).__init__()
    # YANG bookkeeping for this node and its position in the model tree.
    self.yang_name = "basic-counter-interface-templates"
    self.yang_parent_name = "basic-counter-interface"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # Single child: the keyed list of template instances.
    self._child_classes = OrderedDict([("basic-counter-interface-template", ("basic_counter_interface_template", PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate))])
    self._leafs = OrderedDict()  # wrapper container: no leaves
    # YANG list node, modeled as a YList bound to this parent.
    self.basic_counter_interface_template = YList(self)
    self._segment_path = lambda: "basic-counter-interface-templates"
    self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/threshold/basic-counter-interface/%s" % self._segment_path()
    # Set last; NOTE(review) — presumably gates further writes via _perform_setattr.
    self._is_frozen = True
def __setattr__(self, name, value):
    # No leaves on the wrapper container; the YDK base helper still mediates writes.
    self._perform_setattr(PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates, [], name, value)
class BasicCounterInterfaceTemplate(_Entity_):
"""
Interface Basic Counter threshold template
instance
.. attribute:: template_name (key)
Template Name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: in_octets
Number of inbound octets/bytes
**type**\: :py:class:`InOctets <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.InOctets>`
**presence node**\: True
.. attribute:: out_octets
Number of outbound octets/bytes
**type**\: :py:class:`OutOctets <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.OutOctets>`
**presence node**\: True
.. attribute:: output_queue_drops
Number of outbound queue drops
**type**\: :py:class:`OutputQueueDrops <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.OutputQueueDrops>`
**presence node**\: True
.. attribute:: input_total_errors
Number of inbound incorrect packets discarded
**type**\: :py:class:`InputTotalErrors <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.InputTotalErrors>`
**presence node**\: True
.. attribute:: output_total_drops
Number of outbound correct packets discarded
**type**\: :py:class:`OutputTotalDrops <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.OutputTotalDrops>`
**presence node**\: True
.. attribute:: out_packets
Number of outbound packets
**type**\: :py:class:`OutPackets <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.OutPackets>`
**presence node**\: True
.. attribute:: output_total_errors
Number of outbound incorrect packets discarded
**type**\: :py:class:`OutputTotalErrors <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.OutputTotalErrors>`
**presence node**\: True
.. attribute:: in_packets
Number of inbound packets
**type**\: :py:class:`InPackets <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.InPackets>`
**presence node**\: True
.. attribute:: input_queue_drops
Number of input queue drops
**type**\: :py:class:`InputQueueDrops <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.InputQueueDrops>`
**presence node**\: True
.. attribute:: input_total_drops
Number of inbound correct packets discarded
**type**\: :py:class:`InputTotalDrops <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.InputTotalDrops>`
**presence node**\: True
.. attribute:: sample_interval
Frequency of sampling in minutes
**type**\: int
**range:** 1..60
**units**\: minute
.. attribute:: reg_exp_group
Enable instance filtering by regular expression
**type**\: str
**length:** 1..32
.. attribute:: vrf_group
Enable instance filtering by VRF name regular expression
**type**\: str
**length:** 1..32
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
    # Python 3 supports the zero-argument super(); Python 2 needs the explicit form.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate, self).__init__()
    # YANG bookkeeping; template_name is the list key (see ylist_key_names).
    self.yang_name = "basic-counter-interface-template"
    self.yang_parent_name = "basic-counter-interface-templates"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = ['template_name']
    # Map of YANG child name -> (python attribute name, child class) for each
    # per-counter presence container.
    self._child_classes = OrderedDict([("in-octets", ("in_octets", PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.InOctets)), ("out-octets", ("out_octets", PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.OutOctets)), ("output-queue-drops", ("output_queue_drops", PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.OutputQueueDrops)), ("input-total-errors", ("input_total_errors", PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.InputTotalErrors)), ("output-total-drops", ("output_total_drops", PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.OutputTotalDrops)), ("out-packets", ("out_packets", PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.OutPackets)), ("output-total-errors", ("output_total_errors", PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.OutputTotalErrors)), ("in-packets", ("in_packets", PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.InPackets)), ("input-queue-drops", ("input_queue_drops", PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.InputQueueDrops)), ("input-total-drops", ("input_total_drops", PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.InputTotalDrops))])
    # Leaf metadata: python attribute name -> (YLeaf descriptor, accepted types).
    self._leafs = OrderedDict([
        ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
        ('sample_interval', (YLeaf(YType.uint32, 'sample-interval'), ['int'])),
        ('reg_exp_group', (YLeaf(YType.str, 'reg-exp-group'), ['str'])),
        ('vrf_group', (YLeaf(YType.str, 'vrf-group'), ['str'])),
    ])
    # Leaves start unset.
    self.template_name = None
    self.sample_interval = None
    self.reg_exp_group = None
    self.vrf_group = None
    # Presence children start as None (created only when the user assigns them);
    # the name map links python names back to YANG child names.
    self.in_octets = None
    self._children_name_map["in_octets"] = "in-octets"
    self.out_octets = None
    self._children_name_map["out_octets"] = "out-octets"
    self.output_queue_drops = None
    self._children_name_map["output_queue_drops"] = "output-queue-drops"
    self.input_total_errors = None
    self._children_name_map["input_total_errors"] = "input-total-errors"
    self.output_total_drops = None
    self._children_name_map["output_total_drops"] = "output-total-drops"
    self.out_packets = None
    self._children_name_map["out_packets"] = "out-packets"
    self.output_total_errors = None
    self._children_name_map["output_total_errors"] = "output-total-errors"
    self.in_packets = None
    self._children_name_map["in_packets"] = "in-packets"
    self.input_queue_drops = None
    self._children_name_map["input_queue_drops"] = "input-queue-drops"
    self.input_total_drops = None
    self._children_name_map["input_total_drops"] = "input-total-drops"
    # Segment path embeds the current key value, so it is evaluated lazily.
    self._segment_path = lambda: "basic-counter-interface-template" + "[template-name='" + str(self.template_name) + "']"
    self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/threshold/basic-counter-interface/basic-counter-interface-templates/%s" % self._segment_path()
    # Set last; NOTE(review) — presumably gates further writes via _perform_setattr.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Route attribute writes through the YDK base helper with the known leaf names.
    self._perform_setattr(PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate, ['template_name', 'sample_interval', 'reg_exp_group', 'vrf_group'], name, value)
class InOctets(_Entity_):
    """
    Number of inbound octets/bytes.
    This class is a :ref:`presence class<presence-class>`.
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    """
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Zero-argument super() exists only on Python 3; keep the explicit
        # spelling as the Python 2 fallback.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.InOctets, self).__init__()
        self.yang_name = "in-octets"
        self.yang_parent_name = "basic-counter-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()  # leaf-only presence container
        self.is_presence_container = True
        # Leaf metadata: python name -> (YLeaf descriptor, accepted value types).
        _mod = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(_mod, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(_mod, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # Every leaf starts out unset; setattr() routes through __setattr__
        # exactly like a direct assignment would.
        for _leaf_name in self._leafs:
            setattr(self, _leaf_name, None)
        self._segment_path = lambda: "in-octets"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the YDK base helper with the known leaf names.
        self._perform_setattr(
            PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.InOctets,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name, value)

    @staticmethod
    def _meta_info():
        # Imported lazily so the generated meta module is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.InOctets']['meta_info']
class OutOctets(_Entity_):
    """
    Number of outbound octets/bytes.
    This class is a :ref:`presence class<presence-class>`.
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    """
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Zero-argument super() exists only on Python 3; keep the explicit
        # spelling as the Python 2 fallback.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.OutOctets, self).__init__()
        self.yang_name = "out-octets"
        self.yang_parent_name = "basic-counter-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()  # leaf-only presence container
        self.is_presence_container = True
        # Leaf metadata: python name -> (YLeaf descriptor, accepted value types).
        _mod = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(_mod, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(_mod, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # Every leaf starts out unset; setattr() routes through __setattr__
        # exactly like a direct assignment would.
        for _leaf_name in self._leafs:
            setattr(self, _leaf_name, None)
        self._segment_path = lambda: "out-octets"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the YDK base helper with the known leaf names.
        self._perform_setattr(
            PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.OutOctets,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name, value)

    @staticmethod
    def _meta_info():
        # Imported lazily so the generated meta module is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.OutOctets']['meta_info']
class OutputQueueDrops(_Entity_):
    """
    Number of outbound queue drops.
    This class is a :ref:`presence class<presence-class>`.
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    """
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Zero-argument super() exists only on Python 3; keep the explicit
        # spelling as the Python 2 fallback.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.OutputQueueDrops, self).__init__()
        self.yang_name = "output-queue-drops"
        self.yang_parent_name = "basic-counter-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()  # leaf-only presence container
        self.is_presence_container = True
        # Leaf metadata: python name -> (YLeaf descriptor, accepted value types).
        _mod = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(_mod, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(_mod, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # Every leaf starts out unset; setattr() routes through __setattr__
        # exactly like a direct assignment would.
        for _leaf_name in self._leafs:
            setattr(self, _leaf_name, None)
        self._segment_path = lambda: "output-queue-drops"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the YDK base helper with the known leaf names.
        self._perform_setattr(
            PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.OutputQueueDrops,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name, value)

    @staticmethod
    def _meta_info():
        # Imported lazily so the generated meta module is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.OutputQueueDrops']['meta_info']
class InputTotalErrors(_Entity_):
    """
    Number of inbound incorrect packets
    discarded
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    This class is a :ref:`presence class<presence-class>`
    """
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'
    def __init__(self):
        # Generated code supports both Python 2 and 3 runtimes.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.InputTotalErrors, self).__init__()
        self.yang_name = "input-total-errors"
        self.yang_parent_name = "basic-counter-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container: no child containers
        self.is_presence_container = True  # YANG presence container (see class docstring)
        # YANG leaf metadata: python attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # All leaves start unset (None).
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "input-total-errors"
        # Set last — presumably gates further attribute writes via __setattr__/_perform_setattr.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route all attribute writes through YDK's setter with this entity's known leaf names.
        self._perform_setattr(PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.InputTotalErrors, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)
    @staticmethod
    def _meta_info():
        # Lazy import keeps the (large) meta table out of the normal import path.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.InputTotalErrors']['meta_info']
class OutputTotalDrops(_Entity_):
    """
    Number of outbound correct packets discarded
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    This class is a :ref:`presence class<presence-class>`
    """
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'
    def __init__(self):
        # Generated code supports both Python 2 and 3 runtimes.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.OutputTotalDrops, self).__init__()
        self.yang_name = "output-total-drops"
        self.yang_parent_name = "basic-counter-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container: no child containers
        self.is_presence_container = True  # YANG presence container (see class docstring)
        # YANG leaf metadata: python attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # All leaves start unset (None).
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "output-total-drops"
        # Set last — presumably gates further attribute writes via __setattr__/_perform_setattr.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route all attribute writes through YDK's setter with this entity's known leaf names.
        self._perform_setattr(PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.OutputTotalDrops, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)
    @staticmethod
    def _meta_info():
        # Lazy import keeps the (large) meta table out of the normal import path.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.OutputTotalDrops']['meta_info']
class OutPackets(_Entity_):
    """
    Number of outbound packets
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    This class is a :ref:`presence class<presence-class>`
    """
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'
    def __init__(self):
        # Generated code supports both Python 2 and 3 runtimes.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.OutPackets, self).__init__()
        self.yang_name = "out-packets"
        self.yang_parent_name = "basic-counter-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container: no child containers
        self.is_presence_container = True  # YANG presence container (see class docstring)
        # YANG leaf metadata: python attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # All leaves start unset (None).
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "out-packets"
        # Set last — presumably gates further attribute writes via __setattr__/_perform_setattr.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route all attribute writes through YDK's setter with this entity's known leaf names.
        self._perform_setattr(PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.OutPackets, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)
    @staticmethod
    def _meta_info():
        # Lazy import keeps the (large) meta table out of the normal import path.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.OutPackets']['meta_info']
class OutputTotalErrors(_Entity_):
    """
    Number of outbound incorrect packets
    discarded
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    This class is a :ref:`presence class<presence-class>`
    """
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'
    def __init__(self):
        # Generated code supports both Python 2 and 3 runtimes.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.OutputTotalErrors, self).__init__()
        self.yang_name = "output-total-errors"
        self.yang_parent_name = "basic-counter-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container: no child containers
        self.is_presence_container = True  # YANG presence container (see class docstring)
        # YANG leaf metadata: python attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # All leaves start unset (None).
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "output-total-errors"
        # Set last — presumably gates further attribute writes via __setattr__/_perform_setattr.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route all attribute writes through YDK's setter with this entity's known leaf names.
        self._perform_setattr(PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.OutputTotalErrors, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)
    @staticmethod
    def _meta_info():
        # Lazy import keeps the (large) meta table out of the normal import path.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.OutputTotalErrors']['meta_info']
class InPackets(_Entity_):
    """
    Number of inbound packets
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    This class is a :ref:`presence class<presence-class>`
    """
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'
    def __init__(self):
        # Generated code supports both Python 2 and 3 runtimes.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.InPackets, self).__init__()
        self.yang_name = "in-packets"
        self.yang_parent_name = "basic-counter-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container: no child containers
        self.is_presence_container = True  # YANG presence container (see class docstring)
        # YANG leaf metadata: python attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # All leaves start unset (None).
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "in-packets"
        # Set last — presumably gates further attribute writes via __setattr__/_perform_setattr.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route all attribute writes through YDK's setter with this entity's known leaf names.
        self._perform_setattr(PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.InPackets, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)
    @staticmethod
    def _meta_info():
        # Lazy import keeps the (large) meta table out of the normal import path.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.InPackets']['meta_info']
class InputQueueDrops(_Entity_):
    """
    Number of input queue drops
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    This class is a :ref:`presence class<presence-class>`
    """
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'
    def __init__(self):
        # Generated code supports both Python 2 and 3 runtimes.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.InputQueueDrops, self).__init__()
        self.yang_name = "input-queue-drops"
        self.yang_parent_name = "basic-counter-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container: no child containers
        self.is_presence_container = True  # YANG presence container (see class docstring)
        # YANG leaf metadata: python attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # All leaves start unset (None).
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "input-queue-drops"
        # Set last — presumably gates further attribute writes via __setattr__/_perform_setattr.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route all attribute writes through YDK's setter with this entity's known leaf names.
        self._perform_setattr(PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.InputQueueDrops, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)
    @staticmethod
    def _meta_info():
        # Lazy import keeps the (large) meta table out of the normal import path.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.InputQueueDrops']['meta_info']
class InputTotalDrops(_Entity_):
    """
    Number of inbound correct packets discarded
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    This class is a :ref:`presence class<presence-class>`
    """
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'
    def __init__(self):
        # Generated code supports both Python 2 and 3 runtimes.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.InputTotalDrops, self).__init__()
        self.yang_name = "input-total-drops"
        self.yang_parent_name = "basic-counter-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container: no child containers
        self.is_presence_container = True  # YANG presence container (see class docstring)
        # YANG leaf metadata: python attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # All leaves start unset (None).
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "input-total-drops"
        # Set last — presumably gates further attribute writes via __setattr__/_perform_setattr.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route all attribute writes through YDK's setter with this entity's known leaf names.
        self._perform_setattr(PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.InputTotalDrops, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)
    @staticmethod
    def _meta_info():
        # Lazy import keeps the (large) meta table out of the normal import path.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate.InputTotalDrops']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info record for BasicCounterInterfaceTemplate."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
    entry = meta._meta_table['PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates.BasicCounterInterfaceTemplate']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info record for BasicCounterInterfaceTemplates."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
    entry = meta._meta_table['PerfMgmt.Threshold.BasicCounterInterface.BasicCounterInterfaceTemplates']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info record for BasicCounterInterface."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
    entry = meta._meta_table['PerfMgmt.Threshold.BasicCounterInterface']
    return entry['meta_info']
class Bgp(_Entity_):
"""
BGP threshold configuration
.. attribute:: bgp_templates
BGP threshold templates
**type**\: :py:class:`BgpTemplates <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Bgp.BgpTemplates>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
    # Generated code supports both Python 2 and 3 runtimes.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PerfMgmt.Threshold.Bgp, self).__init__()
    self.yang_name = "bgp"
    self.yang_parent_name = "threshold"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # Single child container: YANG name -> (python attribute name, class).
    self._child_classes = OrderedDict([("bgp-templates", ("bgp_templates", PerfMgmt.Threshold.Bgp.BgpTemplates))])
    self._leafs = OrderedDict()  # this container has no leaves of its own
    # Eagerly create the child container and wire up the parent link.
    self.bgp_templates = PerfMgmt.Threshold.Bgp.BgpTemplates()
    self.bgp_templates.parent = self
    self._children_name_map["bgp_templates"] = "bgp-templates"
    self._segment_path = lambda: "bgp"
    self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/threshold/%s" % self._segment_path()
    # Set last — presumably gates further attribute writes via __setattr__/_perform_setattr.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Delegate attribute assignment to YDK's _perform_setattr (no leaf names on this container)."""
    leaf_names = []
    self._perform_setattr(PerfMgmt.Threshold.Bgp, leaf_names, name, value)
class BgpTemplates(_Entity_):
"""
BGP threshold templates
.. attribute:: bgp_template
BGP threshold template instance
**type**\: list of :py:class:`BgpTemplate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
    # Generated code supports both Python 2 and 3 runtimes.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PerfMgmt.Threshold.Bgp.BgpTemplates, self).__init__()
    self.yang_name = "bgp-templates"
    self.yang_parent_name = "bgp"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # Single child list entry type: YANG name -> (python attribute name, class).
    self._child_classes = OrderedDict([("bgp-template", ("bgp_template", PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate))])
    self._leafs = OrderedDict()  # this container has no leaves of its own
    self.bgp_template = YList(self)  # YANG list of BgpTemplate entries
    self._segment_path = lambda: "bgp-templates"
    self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/threshold/bgp/%s" % self._segment_path()
    # Set last — presumably gates further attribute writes via __setattr__/_perform_setattr.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Delegate attribute assignment to YDK's _perform_setattr (no leaf names on this container)."""
    leaf_names = []
    self._perform_setattr(PerfMgmt.Threshold.Bgp.BgpTemplates, leaf_names, name, value)
class BgpTemplate(_Entity_):
"""
BGP threshold template instance
.. attribute:: template_name (key)
Template Name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: output_update_messages
Number of update messages sent
**type**\: :py:class:`OutputUpdateMessages <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.OutputUpdateMessages>`
**presence node**\: True
.. attribute:: errors_received
Number of error notifications received
**type**\: :py:class:`ErrorsReceived <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.ErrorsReceived>`
**presence node**\: True
.. attribute:: conn_established
Number of times the connection was established
**type**\: :py:class:`ConnEstablished <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.ConnEstablished>`
**presence node**\: True
.. attribute:: output_messages
Number of messages sent
**type**\: :py:class:`OutputMessages <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.OutputMessages>`
**presence node**\: True
.. attribute:: conn_dropped
Number of times the connection was dropped
**type**\: :py:class:`ConnDropped <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.ConnDropped>`
**presence node**\: True
.. attribute:: input_update_messages
Number of update messages received
**type**\: :py:class:`InputUpdateMessages <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.InputUpdateMessages>`
**presence node**\: True
.. attribute:: errors_sent
Number of error notifications sent
**type**\: :py:class:`ErrorsSent <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.ErrorsSent>`
**presence node**\: True
.. attribute:: input_messages
Number of messages received
**type**\: :py:class:`InputMessages <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.InputMessages>`
**presence node**\: True
.. attribute:: sample_interval
Frequency of sampling in minutes
**type**\: int
**range:** 1..60
**units**\: minute
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
    # Generated code supports both Python 2 and 3 runtimes.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate, self).__init__()
    self.yang_name = "bgp-template"
    self.yang_parent_name = "bgp-templates"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = ['template_name']  # YANG list key for this entry
    # Presence-container children of a template entry: YANG name -> (python name, class).
    self._child_classes = OrderedDict([("output-update-messages", ("output_update_messages", PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.OutputUpdateMessages)), ("errors-received", ("errors_received", PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.ErrorsReceived)), ("conn-established", ("conn_established", PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.ConnEstablished)), ("output-messages", ("output_messages", PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.OutputMessages)), ("conn-dropped", ("conn_dropped", PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.ConnDropped)), ("input-update-messages", ("input_update_messages", PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.InputUpdateMessages)), ("errors-sent", ("errors_sent", PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.ErrorsSent)), ("input-messages", ("input_messages", PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.InputMessages))])
    # YANG leaf metadata: python attribute name -> (YLeaf descriptor, accepted Python types).
    self._leafs = OrderedDict([
        ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
        ('sample_interval', (YLeaf(YType.uint32, 'sample-interval'), ['int'])),
    ])
    self.template_name = None
    self.sample_interval = None
    # Presence containers start absent (None); the caller creates them on demand.
    self.output_update_messages = None
    self._children_name_map["output_update_messages"] = "output-update-messages"
    self.errors_received = None
    self._children_name_map["errors_received"] = "errors-received"
    self.conn_established = None
    self._children_name_map["conn_established"] = "conn-established"
    self.output_messages = None
    self._children_name_map["output_messages"] = "output-messages"
    self.conn_dropped = None
    self._children_name_map["conn_dropped"] = "conn-dropped"
    self.input_update_messages = None
    self._children_name_map["input_update_messages"] = "input-update-messages"
    self.errors_sent = None
    self._children_name_map["errors_sent"] = "errors-sent"
    self.input_messages = None
    self._children_name_map["input_messages"] = "input-messages"
    # List-entry path embeds the key predicate, evaluated lazily from the current key value.
    self._segment_path = lambda: "bgp-template" + "[template-name='" + str(self.template_name) + "']"
    self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/threshold/bgp/bgp-templates/%s" % self._segment_path()
    # Set last — presumably gates further attribute writes via __setattr__/_perform_setattr.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Delegate attribute assignment to YDK's _perform_setattr with this entry's leaf names."""
    leaf_names = ['template_name', 'sample_interval']
    self._perform_setattr(PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate, leaf_names, name, value)
class OutputUpdateMessages(_Entity_):
    """
    Number of update messages sent
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    This class is a :ref:`presence class<presence-class>`
    """
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'
    def __init__(self):
        # Generated code supports both Python 2 and 3 runtimes.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.OutputUpdateMessages, self).__init__()
        self.yang_name = "output-update-messages"
        self.yang_parent_name = "bgp-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container: no child containers
        self.is_presence_container = True  # YANG presence container (see class docstring)
        # YANG leaf metadata: python attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # All leaves start unset (None).
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "output-update-messages"
        # Set last — presumably gates further attribute writes via __setattr__/_perform_setattr.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route all attribute writes through YDK's setter with this entity's known leaf names.
        self._perform_setattr(PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.OutputUpdateMessages, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)
    @staticmethod
    def _meta_info():
        # Lazy import keeps the (large) meta table out of the normal import path.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.OutputUpdateMessages']['meta_info']
class ErrorsReceived(_Entity_):
    """
    Number of error notifications received.

    Threshold condition for this counter; this is a
    :ref:`presence class<presence-class>`.

    .. attribute:: operator
        Operator
        **type**: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**: int, **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**: int, **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if specified threshold values are in percent
        **type**: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**: int, **range:** 1..100
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2 still needs the explicit class argument to super().
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.ErrorsReceived, self).__init__()

        self.yang_name = "errors-received"
        self.yang_parent_name = "bgp-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()
        self.is_presence_container = True

        # Leaf schema: python attribute -> (YLeaf, accepted value types).
        _cfg_mod = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(_cfg_mod, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(_cfg_mod, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])

        # Every leaf starts unset; assignment goes through __setattr__.
        for _leaf in ('operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'):
            setattr(self, _leaf, None)

        self._segment_path = lambda: "errors-received"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate writes to YDK's validation hook with the leaf-name list.
        self._perform_setattr(
            PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.ErrorsReceived,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name, value)

    @staticmethod
    def _meta_info():
        # Import lazily so the (large) meta module is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta_mod
        return meta_mod._meta_table['PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.ErrorsReceived']['meta_info']
class ConnEstablished(_Entity_):
    """
    Number of times the connection was established.

    Threshold condition for this counter; this is a
    :ref:`presence class<presence-class>`.

    .. attribute:: operator
        Operator
        **type**: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**: int, **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**: int, **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if specified threshold values are in percent
        **type**: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**: int, **range:** 1..100
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2 still needs the explicit class argument to super().
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.ConnEstablished, self).__init__()

        self.yang_name = "conn-established"
        self.yang_parent_name = "bgp-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()
        self.is_presence_container = True

        # Leaf schema: python attribute -> (YLeaf, accepted value types).
        _cfg_mod = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(_cfg_mod, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(_cfg_mod, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])

        # Every leaf starts unset; assignment goes through __setattr__.
        for _leaf in ('operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'):
            setattr(self, _leaf, None)

        self._segment_path = lambda: "conn-established"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate writes to YDK's validation hook with the leaf-name list.
        self._perform_setattr(
            PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.ConnEstablished,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name, value)

    @staticmethod
    def _meta_info():
        # Import lazily so the (large) meta module is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta_mod
        return meta_mod._meta_table['PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.ConnEstablished']['meta_info']
class OutputMessages(_Entity_):
    """
    Number of messages sent.

    Threshold condition for this counter; this is a
    :ref:`presence class<presence-class>`.

    .. attribute:: operator
        Operator
        **type**: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**: int, **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**: int, **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if specified threshold values are in percent
        **type**: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**: int, **range:** 1..100
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2 still needs the explicit class argument to super().
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.OutputMessages, self).__init__()

        self.yang_name = "output-messages"
        self.yang_parent_name = "bgp-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()
        self.is_presence_container = True

        # Leaf schema: python attribute -> (YLeaf, accepted value types).
        _cfg_mod = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(_cfg_mod, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(_cfg_mod, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])

        # Every leaf starts unset; assignment goes through __setattr__.
        for _leaf in ('operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'):
            setattr(self, _leaf, None)

        self._segment_path = lambda: "output-messages"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate writes to YDK's validation hook with the leaf-name list.
        self._perform_setattr(
            PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.OutputMessages,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name, value)

    @staticmethod
    def _meta_info():
        # Import lazily so the (large) meta module is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta_mod
        return meta_mod._meta_table['PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.OutputMessages']['meta_info']
class ConnDropped(_Entity_):
    """
    Number of times the connection was dropped.

    Threshold condition for this counter; this is a
    :ref:`presence class<presence-class>`.

    .. attribute:: operator
        Operator
        **type**: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**: int, **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**: int, **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if specified threshold values are in percent
        **type**: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**: int, **range:** 1..100
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2 still needs the explicit class argument to super().
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.ConnDropped, self).__init__()

        self.yang_name = "conn-dropped"
        self.yang_parent_name = "bgp-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()
        self.is_presence_container = True

        # Leaf schema: python attribute -> (YLeaf, accepted value types).
        _cfg_mod = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(_cfg_mod, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(_cfg_mod, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])

        # Every leaf starts unset; assignment goes through __setattr__.
        for _leaf in ('operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'):
            setattr(self, _leaf, None)

        self._segment_path = lambda: "conn-dropped"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate writes to YDK's validation hook with the leaf-name list.
        self._perform_setattr(
            PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.ConnDropped,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name, value)

    @staticmethod
    def _meta_info():
        # Import lazily so the (large) meta module is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta_mod
        return meta_mod._meta_table['PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.ConnDropped']['meta_info']
class InputUpdateMessages(_Entity_):
    """
    Number of update messages received.

    Threshold condition for this counter; this is a
    :ref:`presence class<presence-class>`.

    .. attribute:: operator
        Operator
        **type**: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**: int, **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**: int, **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if specified threshold values are in percent
        **type**: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**: int, **range:** 1..100
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2 still needs the explicit class argument to super().
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.InputUpdateMessages, self).__init__()

        self.yang_name = "input-update-messages"
        self.yang_parent_name = "bgp-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()
        self.is_presence_container = True

        # Leaf schema: python attribute -> (YLeaf, accepted value types).
        _cfg_mod = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(_cfg_mod, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(_cfg_mod, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])

        # Every leaf starts unset; assignment goes through __setattr__.
        for _leaf in ('operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'):
            setattr(self, _leaf, None)

        self._segment_path = lambda: "input-update-messages"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate writes to YDK's validation hook with the leaf-name list.
        self._perform_setattr(
            PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.InputUpdateMessages,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name, value)

    @staticmethod
    def _meta_info():
        # Import lazily so the (large) meta module is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta_mod
        return meta_mod._meta_table['PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.InputUpdateMessages']['meta_info']
class ErrorsSent(_Entity_):
    """
    Number of error notifications sent.

    Threshold condition for this counter; this is a
    :ref:`presence class<presence-class>`.

    .. attribute:: operator
        Operator
        **type**: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**: int, **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**: int, **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if specified threshold values are in percent
        **type**: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**: int, **range:** 1..100
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2 still needs the explicit class argument to super().
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.ErrorsSent, self).__init__()

        self.yang_name = "errors-sent"
        self.yang_parent_name = "bgp-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()
        self.is_presence_container = True

        # Leaf schema: python attribute -> (YLeaf, accepted value types).
        _cfg_mod = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(_cfg_mod, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(_cfg_mod, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])

        # Every leaf starts unset; assignment goes through __setattr__.
        for _leaf in ('operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'):
            setattr(self, _leaf, None)

        self._segment_path = lambda: "errors-sent"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate writes to YDK's validation hook with the leaf-name list.
        self._perform_setattr(
            PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.ErrorsSent,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name, value)

    @staticmethod
    def _meta_info():
        # Import lazily so the (large) meta module is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta_mod
        return meta_mod._meta_table['PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.ErrorsSent']['meta_info']
class InputMessages(_Entity_):
    """
    Number of messages received.

    Threshold condition for this counter; this is a
    :ref:`presence class<presence-class>`.

    .. attribute:: operator
        Operator
        **type**: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**: int, **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**: int, **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if specified threshold values are in percent
        **type**: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**: int, **range:** 1..100
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2 still needs the explicit class argument to super().
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.InputMessages, self).__init__()

        self.yang_name = "input-messages"
        self.yang_parent_name = "bgp-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()
        self.is_presence_container = True

        # Leaf schema: python attribute -> (YLeaf, accepted value types).
        _cfg_mod = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(_cfg_mod, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(_cfg_mod, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])

        # Every leaf starts unset; assignment goes through __setattr__.
        for _leaf in ('operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'):
            setattr(self, _leaf, None)

        self._segment_path = lambda: "input-messages"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate writes to YDK's validation hook with the leaf-name list.
        self._perform_setattr(
            PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.InputMessages,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name, value)

    @staticmethod
    def _meta_info():
        # Import lazily so the (large) meta module is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta_mod
        return meta_mod._meta_table['PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate.InputMessages']['meta_info']
@staticmethod
def _meta_info():
    # Import lazily so the (large) meta module is only loaded on demand.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta_mod
    return meta_mod._meta_table['PerfMgmt.Threshold.Bgp.BgpTemplates.BgpTemplate']['meta_info']
@staticmethod
def _meta_info():
    # Import lazily so the (large) meta module is only loaded on demand.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta_mod
    return meta_mod._meta_table['PerfMgmt.Threshold.Bgp.BgpTemplates']['meta_info']
@staticmethod
def _meta_info():
    # Import lazily so the (large) meta module is only loaded on demand.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta_mod
    return meta_mod._meta_table['PerfMgmt.Threshold.Bgp']['meta_info']
class Ospfv2Protocol(_Entity_):
"""
OSPF v2 Protocol threshold configuration
.. attribute:: ospfv2_protocol_templates
OSPF v2 Protocol threshold templates
**type**\: :py:class:`Ospfv2ProtocolTemplates <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
    # Python 2 still needs the explicit class argument to super().
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PerfMgmt.Threshold.Ospfv2Protocol, self).__init__()

    self.yang_name = "ospfv2-protocol"
    self.yang_parent_name = "threshold"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []

    _Templates = PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates
    self._child_classes = OrderedDict([("ospfv2-protocol-templates", ("ospfv2_protocol_templates", _Templates))])
    self._leafs = OrderedDict()

    # Child container is created eagerly and parented to this node.
    self.ospfv2_protocol_templates = _Templates()
    self.ospfv2_protocol_templates.parent = self
    self._children_name_map["ospfv2_protocol_templates"] = "ospfv2-protocol-templates"

    self._segment_path = lambda: "ospfv2-protocol"
    self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/threshold/%s" % self._segment_path()
    self._is_frozen = True
def __setattr__(self, name, value):
    # Delegate every attribute write to YDK's _perform_setattr; this node
    # has no leaves, so the leaf-name list is empty.
    self._perform_setattr(PerfMgmt.Threshold.Ospfv2Protocol, [], name, value)
class Ospfv2ProtocolTemplates(_Entity_):
"""
OSPF v2 Protocol threshold templates
.. attribute:: ospfv2_protocol_template
OSPF v2 Protocol threshold template instance
**type**\: list of :py:class:`Ospfv2ProtocolTemplate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
    # Python 2 still needs the explicit class argument to super().
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates, self).__init__()

    self.yang_name = "ospfv2-protocol-templates"
    self.yang_parent_name = "ospfv2-protocol"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []

    _Template = PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate
    self._child_classes = OrderedDict([("ospfv2-protocol-template", ("ospfv2_protocol_template", _Template))])
    self._leafs = OrderedDict()

    # YANG list of template entries, managed by YDK's YList.
    self.ospfv2_protocol_template = YList(self)

    self._segment_path = lambda: "ospfv2-protocol-templates"
    self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/threshold/ospfv2-protocol/%s" % self._segment_path()
    self._is_frozen = True
def __setattr__(self, name, value):
    # Delegate every attribute write to YDK's _perform_setattr; this node
    # has no leaves, so the leaf-name list is empty.
    self._perform_setattr(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates, [], name, value)
class Ospfv2ProtocolTemplate(_Entity_):
"""
OSPF v2 Protocol threshold template instance
.. attribute:: template_name (key)
Template Name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: checksum_errors
Number of packets received with checksum errors
**type**\: :py:class:`ChecksumErrors <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.ChecksumErrors>`
**presence node**\: True
.. attribute:: input_lsa_acks_lsa
Number of LSA received in LSA Acknowledgements
**type**\: :py:class:`InputLsaAcksLsa <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputLsaAcksLsa>`
**presence node**\: True
.. attribute:: output_db_ds_lsa
Number of LSA sent in DBD packets
**type**\: :py:class:`OutputDbDsLsa <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputDbDsLsa>`
**presence node**\: True
.. attribute:: input_db_ds_lsa
Number of LSA received in DBD packets
**type**\: :py:class:`InputDbDsLsa <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputDbDsLsa>`
**presence node**\: True
.. attribute:: input_lsa_updates
Number of LSA Updates received
**type**\: :py:class:`InputLsaUpdates <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputLsaUpdates>`
**presence node**\: True
.. attribute:: output_db_ds
Number of DBD packets sent
**type**\: :py:class:`OutputDbDs <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputDbDs>`
**presence node**\: True
.. attribute:: output_lsa_updates_lsa
Number of LSA sent in LSA Updates
**type**\: :py:class:`OutputLsaUpdatesLsa <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputLsaUpdatesLsa>`
**presence node**\: True
.. attribute:: input_db_ds
Number of DBD packets received
**type**\: :py:class:`InputDbDs <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputDbDs>`
**presence node**\: True
.. attribute:: input_lsa_updates_lsa
Number of LSA received in LSA Updates
**type**\: :py:class:`InputLsaUpdatesLsa <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputLsaUpdatesLsa>`
**presence node**\: True
.. attribute:: sample_interval
Frequency of sampling in minutes
**type**\: int
**range:** 1..60
**units**\: minute
.. attribute:: output_packets
Total number of packets sent
**type**\: :py:class:`OutputPackets <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputPackets>`
**presence node**\: True
.. attribute:: input_packets
Total number of packets received
**type**\: :py:class:`InputPackets <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputPackets>`
**presence node**\: True
.. attribute:: output_hello_packets
Total number of packets sent
**type**\: :py:class:`OutputHelloPackets <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputHelloPackets>`
**presence node**\: True
.. attribute:: input_hello_packets
Number of Hello packets received
**type**\: :py:class:`InputHelloPackets <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputHelloPackets>`
**presence node**\: True
.. attribute:: output_ls_requests
Number of LS Requests sent
**type**\: :py:class:`OutputLsRequests <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputLsRequests>`
**presence node**\: True
.. attribute:: output_lsa_acks_lsa
Number of LSA sent in LSA Acknowledgements
**type**\: :py:class:`OutputLsaAcksLsa <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputLsaAcksLsa>`
**presence node**\: True
.. attribute:: output_lsa_acks
Number of LSA Acknowledgements sent
**type**\: :py:class:`OutputLsaAcks <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputLsaAcks>`
**presence node**\: True
.. attribute:: input_lsa_acks
Number of LSA Acknowledgements received
**type**\: :py:class:`InputLsaAcks <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputLsaAcks>`
**presence node**\: True
.. attribute:: output_lsa_updates
Number of LSA Updates sent
**type**\: :py:class:`OutputLsaUpdates <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputLsaUpdates>`
**presence node**\: True
.. attribute:: output_ls_requests_lsa
Number of LSA sent in LS Requests
**type**\: :py:class:`OutputLsRequestsLsa <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputLsRequestsLsa>`
**presence node**\: True
.. attribute:: input_ls_requests_lsa
Number of LSA received in LS Requests
**type**\: :py:class:`InputLsRequestsLsa <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputLsRequestsLsa>`
**presence node**\: True
.. attribute:: input_ls_requests
Number of LS Requests received
**type**\: :py:class:`InputLsRequests <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputLsRequests>`
**presence node**\: True
"""
# YANG module prefix and revision this generated class corresponds to.
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'

def __init__(self):
    """Initialize the list entry: key/leafs unset, all child presence containers None."""
    # Python 2/3 compatible super() invocation (generated-code idiom).
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate, self).__init__()

    self.yang_name = "ospfv2-protocol-template"
    self.yang_parent_name = "ospfv2-protocol-templates"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    # This entity is a YANG list keyed by 'template-name' (see _segment_path below).
    self.ylist_key_names = ['template_name']
    # Map of YANG child container name -> (python attribute name, child class).
    self._child_classes = OrderedDict([
        ("checksum-errors", ("checksum_errors", PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.ChecksumErrors)),
        ("input-lsa-acks-lsa", ("input_lsa_acks_lsa", PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputLsaAcksLsa)),
        ("output-db-ds-lsa", ("output_db_ds_lsa", PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputDbDsLsa)),
        ("input-db-ds-lsa", ("input_db_ds_lsa", PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputDbDsLsa)),
        ("input-lsa-updates", ("input_lsa_updates", PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputLsaUpdates)),
        ("output-db-ds", ("output_db_ds", PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputDbDs)),
        ("output-lsa-updates-lsa", ("output_lsa_updates_lsa", PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputLsaUpdatesLsa)),
        ("input-db-ds", ("input_db_ds", PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputDbDs)),
        ("input-lsa-updates-lsa", ("input_lsa_updates_lsa", PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputLsaUpdatesLsa)),
        ("output-packets", ("output_packets", PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputPackets)),
        ("input-packets", ("input_packets", PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputPackets)),
        ("output-hello-packets", ("output_hello_packets", PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputHelloPackets)),
        ("input-hello-packets", ("input_hello_packets", PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputHelloPackets)),
        ("output-ls-requests", ("output_ls_requests", PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputLsRequests)),
        ("output-lsa-acks-lsa", ("output_lsa_acks_lsa", PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputLsaAcksLsa)),
        ("output-lsa-acks", ("output_lsa_acks", PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputLsaAcks)),
        ("input-lsa-acks", ("input_lsa_acks", PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputLsaAcks)),
        ("output-lsa-updates", ("output_lsa_updates", PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputLsaUpdates)),
        ("output-ls-requests-lsa", ("output_ls_requests_lsa", PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputLsRequestsLsa)),
        ("input-ls-requests-lsa", ("input_ls_requests_lsa", PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputLsRequestsLsa)),
        ("input-ls-requests", ("input_ls_requests", PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputLsRequests))])
    # Leaf python name -> (YLeaf descriptor, accepted value types).
    self._leafs = OrderedDict([
        ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
        ('sample_interval', (YLeaf(YType.uint32, 'sample-interval'), ['int'])),
    ])
    self.template_name = None
    self.sample_interval = None
    # One attribute per child presence container; each starts as None and is
    # created by the user when that threshold is configured.
    # NOTE(review): _children_name_map is presumably initialized by the
    # _Entity_ base __init__ -- confirm in the ydk core.
    self.checksum_errors = None
    self._children_name_map["checksum_errors"] = "checksum-errors"
    self.input_lsa_acks_lsa = None
    self._children_name_map["input_lsa_acks_lsa"] = "input-lsa-acks-lsa"
    self.output_db_ds_lsa = None
    self._children_name_map["output_db_ds_lsa"] = "output-db-ds-lsa"
    self.input_db_ds_lsa = None
    self._children_name_map["input_db_ds_lsa"] = "input-db-ds-lsa"
    self.input_lsa_updates = None
    self._children_name_map["input_lsa_updates"] = "input-lsa-updates"
    self.output_db_ds = None
    self._children_name_map["output_db_ds"] = "output-db-ds"
    self.output_lsa_updates_lsa = None
    self._children_name_map["output_lsa_updates_lsa"] = "output-lsa-updates-lsa"
    self.input_db_ds = None
    self._children_name_map["input_db_ds"] = "input-db-ds"
    self.input_lsa_updates_lsa = None
    self._children_name_map["input_lsa_updates_lsa"] = "input-lsa-updates-lsa"
    self.output_packets = None
    self._children_name_map["output_packets"] = "output-packets"
    self.input_packets = None
    self._children_name_map["input_packets"] = "input-packets"
    self.output_hello_packets = None
    self._children_name_map["output_hello_packets"] = "output-hello-packets"
    self.input_hello_packets = None
    self._children_name_map["input_hello_packets"] = "input-hello-packets"
    self.output_ls_requests = None
    self._children_name_map["output_ls_requests"] = "output-ls-requests"
    self.output_lsa_acks_lsa = None
    self._children_name_map["output_lsa_acks_lsa"] = "output-lsa-acks-lsa"
    self.output_lsa_acks = None
    self._children_name_map["output_lsa_acks"] = "output-lsa-acks"
    self.input_lsa_acks = None
    self._children_name_map["input_lsa_acks"] = "input-lsa-acks"
    self.output_lsa_updates = None
    self._children_name_map["output_lsa_updates"] = "output-lsa-updates"
    self.output_ls_requests_lsa = None
    self._children_name_map["output_ls_requests_lsa"] = "output-ls-requests-lsa"
    self.input_ls_requests_lsa = None
    self._children_name_map["input_ls_requests_lsa"] = "input-ls-requests-lsa"
    self.input_ls_requests = None
    self._children_name_map["input_ls_requests"] = "input-ls-requests"
    # Segment path embeds the list key as an XPath-style predicate; evaluated
    # lazily so it reflects the current template_name.
    self._segment_path = lambda: "ospfv2-protocol-template" + "[template-name='" + str(self.template_name) + "']"
    self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/threshold/ospfv2-protocol/ospfv2-protocol-templates/%s" % self._segment_path()
    # Must be assigned last. NOTE(review): _perform_setattr (see __setattr__)
    # appears to restrict attribute writes once frozen -- confirm in the
    # ydk _Entity_ base class.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Route every attribute write through YDK's validation hook, passing the
    # names of this entity's declared leafs.
    self._perform_setattr(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate, ['template_name', 'sample_interval'], name, value)
class ChecksumErrors(_Entity_):
    """
    Number of packets received with checksum
    errors

    .. attribute:: operator
        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if Specified threshold values are in percent
        **type**\: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`
    """

    # YANG module prefix and revision this generated class corresponds to.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() invocation (generated-code idiom).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.ChecksumErrors, self).__init__()

        self.yang_name = "checksum-errors"
        self.yang_parent_name = "ospfv2-protocol-template"
        self.is_top_level_class = False
        # The parent ospfv2-protocol-template is a keyed list entry.
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: creating this node is itself configuration.
        self.is_presence_container = True
        # Leaf python name -> (YLeaf descriptor, accepted value types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "checksum-errors"
        # Must be assigned last. NOTE(review): _perform_setattr appears to
        # restrict writes once frozen -- confirm in the ydk _Entity_ base class.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route attribute writes through YDK validation for the declared leafs.
        self._perform_setattr(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.ChecksumErrors, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Deferred import: only load the large generated meta table on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.ChecksumErrors']['meta_info']
class InputLsaAcksLsa(_Entity_):
    """
    Number of LSA received in LSA Acknowledgements

    .. attribute:: operator
        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if Specified threshold values are in percent
        **type**\: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`
    """

    # YANG module prefix and revision this generated class corresponds to.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() invocation (generated-code idiom).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputLsaAcksLsa, self).__init__()

        self.yang_name = "input-lsa-acks-lsa"
        self.yang_parent_name = "ospfv2-protocol-template"
        self.is_top_level_class = False
        # The parent ospfv2-protocol-template is a keyed list entry.
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: creating this node is itself configuration.
        self.is_presence_container = True
        # Leaf python name -> (YLeaf descriptor, accepted value types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "input-lsa-acks-lsa"
        # Must be assigned last. NOTE(review): _perform_setattr appears to
        # restrict writes once frozen -- confirm in the ydk _Entity_ base class.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route attribute writes through YDK validation for the declared leafs.
        self._perform_setattr(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputLsaAcksLsa, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Deferred import: only load the large generated meta table on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputLsaAcksLsa']['meta_info']
class OutputDbDsLsa(_Entity_):
    """
    Number of LSA sent in DBD packets

    .. attribute:: operator
        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if Specified threshold values are in percent
        **type**\: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`
    """

    # YANG module prefix and revision this generated class corresponds to.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() invocation (generated-code idiom).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputDbDsLsa, self).__init__()

        self.yang_name = "output-db-ds-lsa"
        self.yang_parent_name = "ospfv2-protocol-template"
        self.is_top_level_class = False
        # The parent ospfv2-protocol-template is a keyed list entry.
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: creating this node is itself configuration.
        self.is_presence_container = True
        # Leaf python name -> (YLeaf descriptor, accepted value types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "output-db-ds-lsa"
        # Must be assigned last. NOTE(review): _perform_setattr appears to
        # restrict writes once frozen -- confirm in the ydk _Entity_ base class.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route attribute writes through YDK validation for the declared leafs.
        self._perform_setattr(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputDbDsLsa, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Deferred import: only load the large generated meta table on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputDbDsLsa']['meta_info']
class InputDbDsLsa(_Entity_):
    """
    Number of LSA received in DBD packets

    .. attribute:: operator
        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if Specified threshold values are in percent
        **type**\: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`
    """

    # YANG module prefix and revision this generated class corresponds to.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() invocation (generated-code idiom).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputDbDsLsa, self).__init__()

        self.yang_name = "input-db-ds-lsa"
        self.yang_parent_name = "ospfv2-protocol-template"
        self.is_top_level_class = False
        # The parent ospfv2-protocol-template is a keyed list entry.
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: creating this node is itself configuration.
        self.is_presence_container = True
        # Leaf python name -> (YLeaf descriptor, accepted value types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "input-db-ds-lsa"
        # Must be assigned last. NOTE(review): _perform_setattr appears to
        # restrict writes once frozen -- confirm in the ydk _Entity_ base class.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route attribute writes through YDK validation for the declared leafs.
        self._perform_setattr(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputDbDsLsa, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Deferred import: only load the large generated meta table on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputDbDsLsa']['meta_info']
class InputLsaUpdates(_Entity_):
    """
    Number of LSA Updates received

    .. attribute:: operator
        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if Specified threshold values are in percent
        **type**\: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`
    """

    # YANG module prefix and revision this generated class corresponds to.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() invocation (generated-code idiom).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputLsaUpdates, self).__init__()

        self.yang_name = "input-lsa-updates"
        self.yang_parent_name = "ospfv2-protocol-template"
        self.is_top_level_class = False
        # The parent ospfv2-protocol-template is a keyed list entry.
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: creating this node is itself configuration.
        self.is_presence_container = True
        # Leaf python name -> (YLeaf descriptor, accepted value types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "input-lsa-updates"
        # Must be assigned last. NOTE(review): _perform_setattr appears to
        # restrict writes once frozen -- confirm in the ydk _Entity_ base class.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route attribute writes through YDK validation for the declared leafs.
        self._perform_setattr(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputLsaUpdates, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Deferred import: only load the large generated meta table on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputLsaUpdates']['meta_info']
class OutputDbDs(_Entity_):
    """
    Number of DBD packets sent

    .. attribute:: operator
        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if Specified threshold values are in percent
        **type**\: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`
    """

    # YANG module prefix and revision this generated class corresponds to.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() invocation (generated-code idiom).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputDbDs, self).__init__()

        self.yang_name = "output-db-ds"
        self.yang_parent_name = "ospfv2-protocol-template"
        self.is_top_level_class = False
        # The parent ospfv2-protocol-template is a keyed list entry.
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: creating this node is itself configuration.
        self.is_presence_container = True
        # Leaf python name -> (YLeaf descriptor, accepted value types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "output-db-ds"
        # Must be assigned last. NOTE(review): _perform_setattr appears to
        # restrict writes once frozen -- confirm in the ydk _Entity_ base class.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route attribute writes through YDK validation for the declared leafs.
        self._perform_setattr(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputDbDs, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Deferred import: only load the large generated meta table on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputDbDs']['meta_info']
class OutputLsaUpdatesLsa(_Entity_):
    """
    Number of LSA sent in LSA Updates

    .. attribute:: operator
        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if Specified threshold values are in percent
        **type**\: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`
    """

    # YANG module prefix and revision this generated class corresponds to.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() invocation (generated-code idiom).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputLsaUpdatesLsa, self).__init__()

        self.yang_name = "output-lsa-updates-lsa"
        self.yang_parent_name = "ospfv2-protocol-template"
        self.is_top_level_class = False
        # The parent ospfv2-protocol-template is a keyed list entry.
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: creating this node is itself configuration.
        self.is_presence_container = True
        # Leaf python name -> (YLeaf descriptor, accepted value types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "output-lsa-updates-lsa"
        # Must be assigned last. NOTE(review): _perform_setattr appears to
        # restrict writes once frozen -- confirm in the ydk _Entity_ base class.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route attribute writes through YDK validation for the declared leafs.
        self._perform_setattr(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputLsaUpdatesLsa, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Deferred import: only load the large generated meta table on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputLsaUpdatesLsa']['meta_info']
class InputDbDs(_Entity_):
    """
    Number of DBD packets received

    .. attribute:: operator
        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if Specified threshold values are in percent
        **type**\: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`
    """

    # YANG module prefix and revision this generated class corresponds to.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() invocation (generated-code idiom).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputDbDs, self).__init__()

        self.yang_name = "input-db-ds"
        self.yang_parent_name = "ospfv2-protocol-template"
        self.is_top_level_class = False
        # The parent ospfv2-protocol-template is a keyed list entry.
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: creating this node is itself configuration.
        self.is_presence_container = True
        # Leaf python name -> (YLeaf descriptor, accepted value types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "input-db-ds"
        # Must be assigned last. NOTE(review): _perform_setattr appears to
        # restrict writes once frozen -- confirm in the ydk _Entity_ base class.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route attribute writes through YDK validation for the declared leafs.
        self._perform_setattr(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputDbDs, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Deferred import: only load the large generated meta table on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputDbDs']['meta_info']
class InputLsaUpdatesLsa(_Entity_):
    """
    Number of LSA received in LSA Updates

    .. attribute:: operator
        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if Specified threshold values are in percent
        **type**\: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`
    """

    # YANG module prefix and revision this generated class corresponds to.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() invocation (generated-code idiom).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputLsaUpdatesLsa, self).__init__()

        self.yang_name = "input-lsa-updates-lsa"
        self.yang_parent_name = "ospfv2-protocol-template"
        self.is_top_level_class = False
        # The parent ospfv2-protocol-template is a keyed list entry.
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: creating this node is itself configuration.
        self.is_presence_container = True
        # Leaf python name -> (YLeaf descriptor, accepted value types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "input-lsa-updates-lsa"
        # Must be assigned last. NOTE(review): _perform_setattr appears to
        # restrict writes once frozen -- confirm in the ydk _Entity_ base class.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route attribute writes through YDK validation for the declared leafs.
        self._perform_setattr(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputLsaUpdatesLsa, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Deferred import: only load the large generated meta table on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputLsaUpdatesLsa']['meta_info']
class OutputPackets(_Entity_):
    """
    Total number of packets sent

    .. attribute:: operator
        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if Specified threshold values are in percent
        **type**\: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`
    """

    # YANG module prefix and revision this generated class corresponds to.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() invocation (generated-code idiom).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputPackets, self).__init__()

        self.yang_name = "output-packets"
        self.yang_parent_name = "ospfv2-protocol-template"
        self.is_top_level_class = False
        # The parent ospfv2-protocol-template is a keyed list entry.
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: creating this node is itself configuration.
        self.is_presence_container = True
        # Leaf python name -> (YLeaf descriptor, accepted value types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "output-packets"
        # Must be assigned last. NOTE(review): _perform_setattr appears to
        # restrict writes once frozen -- confirm in the ydk _Entity_ base class.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route attribute writes through YDK validation for the declared leafs.
        self._perform_setattr(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputPackets, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Deferred import: only load the large generated meta table on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputPackets']['meta_info']
class InputPackets(_Entity_):
    """
    Total number of packets received
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    This class is a :ref:`presence class<presence-class>`
    """
    # Generated YDK binding for the "input-packets" presence container under
    # ospfv2-protocol-template; regenerate from the YANG model rather than editing.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Generator emits a Python 2/3-compatible super() call.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputPackets, self).__init__()
        self.yang_name = "input-packets"
        self.yang_parent_name = "ospfv2-protocol-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container: no child entities
        self.is_presence_container = True
        # Leaf name -> (YLeaf descriptor, accepted Python types); order mirrors the YANG model.
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # All leaves start unset; these writes still pass through __setattr__.
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "input-packets"
        # Freeze last: must follow every assignment above — once frozen,
        # _perform_setattr presumably rejects unknown attribute names (confirm).
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route every attribute write through YDK validation against the declared leaf names.
        self._perform_setattr(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputPackets, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Meta table imported locally, presumably to avoid a circular import — confirm.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputPackets']['meta_info']
class OutputHelloPackets(_Entity_):
    """
    Total number of packets sent
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    This class is a :ref:`presence class<presence-class>`
    """
    # NOTE(review): the description above reads "Total number of packets sent",
    # same as OutputPackets, but this binds the hello-packet counter
    # ("output-hello-packets") — the source YANG description itself appears
    # imprecise; confirm against the Cisco-IOS-XR-manageability-perfmgmt-cfg model.
    # Generated YDK binding; regenerate from the YANG model rather than editing.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Generator emits a Python 2/3-compatible super() call.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputHelloPackets, self).__init__()
        self.yang_name = "output-hello-packets"
        self.yang_parent_name = "ospfv2-protocol-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container: no child entities
        self.is_presence_container = True
        # Leaf name -> (YLeaf descriptor, accepted Python types); order mirrors the YANG model.
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # All leaves start unset; these writes still pass through __setattr__.
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "output-hello-packets"
        # Freeze last: must follow every assignment above — once frozen,
        # _perform_setattr presumably rejects unknown attribute names (confirm).
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route every attribute write through YDK validation against the declared leaf names.
        self._perform_setattr(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputHelloPackets, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Meta table imported locally, presumably to avoid a circular import — confirm.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputHelloPackets']['meta_info']
class InputHelloPackets(_Entity_):
    """
    Number of Hello packets received
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    This class is a :ref:`presence class<presence-class>`
    """
    # Generated YDK binding for the "input-hello-packets" presence container under
    # ospfv2-protocol-template; regenerate from the YANG model rather than editing.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Generator emits a Python 2/3-compatible super() call.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputHelloPackets, self).__init__()
        self.yang_name = "input-hello-packets"
        self.yang_parent_name = "ospfv2-protocol-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container: no child entities
        self.is_presence_container = True
        # Leaf name -> (YLeaf descriptor, accepted Python types); order mirrors the YANG model.
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # All leaves start unset; these writes still pass through __setattr__.
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "input-hello-packets"
        # Freeze last: must follow every assignment above — once frozen,
        # _perform_setattr presumably rejects unknown attribute names (confirm).
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route every attribute write through YDK validation against the declared leaf names.
        self._perform_setattr(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputHelloPackets, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Meta table imported locally, presumably to avoid a circular import — confirm.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputHelloPackets']['meta_info']
class OutputLsRequests(_Entity_):
    """
    Number of LS Requests sent
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    This class is a :ref:`presence class<presence-class>`
    """
    # Generated YDK binding for the "output-ls-requests" presence container under
    # ospfv2-protocol-template; regenerate from the YANG model rather than editing.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Generator emits a Python 2/3-compatible super() call.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputLsRequests, self).__init__()
        self.yang_name = "output-ls-requests"
        self.yang_parent_name = "ospfv2-protocol-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container: no child entities
        self.is_presence_container = True
        # Leaf name -> (YLeaf descriptor, accepted Python types); order mirrors the YANG model.
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # All leaves start unset; these writes still pass through __setattr__.
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "output-ls-requests"
        # Freeze last: must follow every assignment above — once frozen,
        # _perform_setattr presumably rejects unknown attribute names (confirm).
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route every attribute write through YDK validation against the declared leaf names.
        self._perform_setattr(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputLsRequests, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Meta table imported locally, presumably to avoid a circular import — confirm.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputLsRequests']['meta_info']
class OutputLsaAcksLsa(_Entity_):
    """
    Number of LSA sent in LSA Acknowledgements
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    This class is a :ref:`presence class<presence-class>`
    """
    # Generated YDK binding for the "output-lsa-acks-lsa" presence container under
    # ospfv2-protocol-template; regenerate from the YANG model rather than editing.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Generator emits a Python 2/3-compatible super() call.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputLsaAcksLsa, self).__init__()
        self.yang_name = "output-lsa-acks-lsa"
        self.yang_parent_name = "ospfv2-protocol-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container: no child entities
        self.is_presence_container = True
        # Leaf name -> (YLeaf descriptor, accepted Python types); order mirrors the YANG model.
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # All leaves start unset; these writes still pass through __setattr__.
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "output-lsa-acks-lsa"
        # Freeze last: must follow every assignment above — once frozen,
        # _perform_setattr presumably rejects unknown attribute names (confirm).
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route every attribute write through YDK validation against the declared leaf names.
        self._perform_setattr(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputLsaAcksLsa, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Meta table imported locally, presumably to avoid a circular import — confirm.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputLsaAcksLsa']['meta_info']
class OutputLsaAcks(_Entity_):
    """
    Number of LSA Acknowledgements sent
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    This class is a :ref:`presence class<presence-class>`
    """
    # Generated YDK binding for the "output-lsa-acks" presence container under
    # ospfv2-protocol-template; regenerate from the YANG model rather than editing.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Generator emits a Python 2/3-compatible super() call.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputLsaAcks, self).__init__()
        self.yang_name = "output-lsa-acks"
        self.yang_parent_name = "ospfv2-protocol-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container: no child entities
        self.is_presence_container = True
        # Leaf name -> (YLeaf descriptor, accepted Python types); order mirrors the YANG model.
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # All leaves start unset; these writes still pass through __setattr__.
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "output-lsa-acks"
        # Freeze last: must follow every assignment above — once frozen,
        # _perform_setattr presumably rejects unknown attribute names (confirm).
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route every attribute write through YDK validation against the declared leaf names.
        self._perform_setattr(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputLsaAcks, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Meta table imported locally, presumably to avoid a circular import — confirm.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputLsaAcks']['meta_info']
class InputLsaAcks(_Entity_):
    """
    Number of LSA Acknowledgements received
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    This class is a :ref:`presence class<presence-class>`
    """
    # Generated YDK binding for the "input-lsa-acks" presence container under
    # ospfv2-protocol-template; regenerate from the YANG model rather than editing.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Generator emits a Python 2/3-compatible super() call.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputLsaAcks, self).__init__()
        self.yang_name = "input-lsa-acks"
        self.yang_parent_name = "ospfv2-protocol-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container: no child entities
        self.is_presence_container = True
        # Leaf name -> (YLeaf descriptor, accepted Python types); order mirrors the YANG model.
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # All leaves start unset; these writes still pass through __setattr__.
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "input-lsa-acks"
        # Freeze last: must follow every assignment above — once frozen,
        # _perform_setattr presumably rejects unknown attribute names (confirm).
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route every attribute write through YDK validation against the declared leaf names.
        self._perform_setattr(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputLsaAcks, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Meta table imported locally, presumably to avoid a circular import — confirm.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputLsaAcks']['meta_info']
class OutputLsaUpdates(_Entity_):
    """
    Number of LSA Updates sent
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    This class is a :ref:`presence class<presence-class>`
    """
    # Generated YDK binding for the "output-lsa-updates" presence container under
    # ospfv2-protocol-template; regenerate from the YANG model rather than editing.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Generator emits a Python 2/3-compatible super() call.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputLsaUpdates, self).__init__()
        self.yang_name = "output-lsa-updates"
        self.yang_parent_name = "ospfv2-protocol-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container: no child entities
        self.is_presence_container = True
        # Leaf name -> (YLeaf descriptor, accepted Python types); order mirrors the YANG model.
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # All leaves start unset; these writes still pass through __setattr__.
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "output-lsa-updates"
        # Freeze last: must follow every assignment above — once frozen,
        # _perform_setattr presumably rejects unknown attribute names (confirm).
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route every attribute write through YDK validation against the declared leaf names.
        self._perform_setattr(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputLsaUpdates, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Meta table imported locally, presumably to avoid a circular import — confirm.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputLsaUpdates']['meta_info']
class OutputLsRequestsLsa(_Entity_):
    """
    Number of LSA sent in LS Requests
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    This class is a :ref:`presence class<presence-class>`
    """
    # Generated YDK binding for the "output-ls-requests-lsa" presence container under
    # ospfv2-protocol-template; regenerate from the YANG model rather than editing.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Generator emits a Python 2/3-compatible super() call.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputLsRequestsLsa, self).__init__()
        self.yang_name = "output-ls-requests-lsa"
        self.yang_parent_name = "ospfv2-protocol-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container: no child entities
        self.is_presence_container = True
        # Leaf name -> (YLeaf descriptor, accepted Python types); order mirrors the YANG model.
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # All leaves start unset; these writes still pass through __setattr__.
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "output-ls-requests-lsa"
        # Freeze last: must follow every assignment above — once frozen,
        # _perform_setattr presumably rejects unknown attribute names (confirm).
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route every attribute write through YDK validation against the declared leaf names.
        self._perform_setattr(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputLsRequestsLsa, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Meta table imported locally, presumably to avoid a circular import — confirm.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.OutputLsRequestsLsa']['meta_info']
class InputLsRequestsLsa(_Entity_):
    """
    Number of LSA received in LS Requests
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    This class is a :ref:`presence class<presence-class>`
    """
    # Generated YDK binding for the "input-ls-requests-lsa" presence container under
    # ospfv2-protocol-template; regenerate from the YANG model rather than editing.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Generator emits a Python 2/3-compatible super() call.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputLsRequestsLsa, self).__init__()
        self.yang_name = "input-ls-requests-lsa"
        self.yang_parent_name = "ospfv2-protocol-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container: no child entities
        self.is_presence_container = True
        # Leaf name -> (YLeaf descriptor, accepted Python types); order mirrors the YANG model.
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # All leaves start unset; these writes still pass through __setattr__.
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "input-ls-requests-lsa"
        # Freeze last: must follow every assignment above — once frozen,
        # _perform_setattr presumably rejects unknown attribute names (confirm).
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route every attribute write through YDK validation against the declared leaf names.
        self._perform_setattr(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputLsRequestsLsa, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Meta table imported locally, presumably to avoid a circular import — confirm.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputLsRequestsLsa']['meta_info']
class InputLsRequests(_Entity_):
    """
    Number of LS Requests received
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    This class is a :ref:`presence class<presence-class>`
    """
    # Generated YDK binding for the "input-ls-requests" presence container under
    # ospfv2-protocol-template; regenerate from the YANG model rather than editing.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Generator emits a Python 2/3-compatible super() call.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputLsRequests, self).__init__()
        self.yang_name = "input-ls-requests"
        self.yang_parent_name = "ospfv2-protocol-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container: no child entities
        self.is_presence_container = True
        # Leaf name -> (YLeaf descriptor, accepted Python types); order mirrors the YANG model.
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # All leaves start unset; these writes still pass through __setattr__.
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "input-ls-requests"
        # Freeze last: must follow every assignment above — once frozen,
        # _perform_setattr presumably rejects unknown attribute names (confirm).
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route every attribute write through YDK validation against the declared leaf names.
        self._perform_setattr(PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputLsRequests, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Meta table imported locally, presumably to avoid a circular import — confirm.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate.InputLsRequests']['meta_info']
@staticmethod
def _meta_info():
    # Meta lookup for the enclosing Ospfv2ProtocolTemplate class; imported
    # locally, presumably to avoid a circular import — confirm.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
    return meta._meta_table['PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates.Ospfv2ProtocolTemplate']['meta_info']
@staticmethod
def _meta_info():
    # Meta lookup for the enclosing Ospfv2ProtocolTemplates class; imported
    # locally, presumably to avoid a circular import — confirm.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
    return meta._meta_table['PerfMgmt.Threshold.Ospfv2Protocol.Ospfv2ProtocolTemplates']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-information entry for Ospfv2Protocol."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
    entry = meta._meta_table['PerfMgmt.Threshold.Ospfv2Protocol']
    return entry['meta_info']
class CpuNode(_Entity_):
    """
    Node CPU threshold configuration.

    .. attribute:: cpu_node_templates

        Node CPU threshold configuration templates
        **type**\: :py:class:`CpuNodeTemplates <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.CpuNode.CpuNodeTemplates>`

    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        """Initialize the cpu-node container and its templates child."""
        # super() call form depends on the running Python major version.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.CpuNode, self).__init__()

        # YANG schema metadata for this node.
        self.yang_name = "cpu-node"
        self.yang_parent_name = "threshold"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("cpu-node-templates", ("cpu_node_templates", PerfMgmt.Threshold.CpuNode.CpuNodeTemplates))])
        self._leafs = OrderedDict()

        # Eagerly build the single child container.
        self.cpu_node_templates = PerfMgmt.Threshold.CpuNode.CpuNodeTemplates()
        self.cpu_node_templates.parent = self
        self._children_name_map["cpu_node_templates"] = "cpu-node-templates"
        self._segment_path = lambda: "cpu-node"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/threshold/%s" % self._segment_path()
        # Freeze the entity: subsequent attribute writes are validated by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(PerfMgmt.Threshold.CpuNode, [], name, value)


    class CpuNodeTemplates(_Entity_):
        """
        Node CPU threshold configuration templates.

        .. attribute:: cpu_node_template

            Node CPU threshold configuration template instances
            **type**\: list of :py:class:`CpuNodeTemplate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.CpuNode.CpuNodeTemplates.CpuNodeTemplate>`

        """

        _prefix = 'manageability-perfmgmt-cfg'
        _revision = '2017-09-07'

        def __init__(self):
            """Initialize the cpu-node-templates list container."""
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PerfMgmt.Threshold.CpuNode.CpuNodeTemplates, self).__init__()

            # YANG schema metadata for this node.
            self.yang_name = "cpu-node-templates"
            self.yang_parent_name = "cpu-node"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("cpu-node-template", ("cpu_node_template", PerfMgmt.Threshold.CpuNode.CpuNodeTemplates.CpuNodeTemplate))])
            self._leafs = OrderedDict()

            # YANG list: holds CpuNodeTemplate entries keyed by template-name.
            self.cpu_node_template = YList(self)
            self._segment_path = lambda: "cpu-node-templates"
            self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/threshold/cpu-node/%s" % self._segment_path()
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(PerfMgmt.Threshold.CpuNode.CpuNodeTemplates, [], name, value)


        class CpuNodeTemplate(_Entity_):
            """
            Node CPU threshold configuration template instances.

            .. attribute:: template_name  (key)

                Template Name
                **type**\: str
                **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+

            .. attribute:: average_cpu_used

                Average %CPU utilization
                **type**\: :py:class:`AverageCpuUsed <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.CpuNode.CpuNodeTemplates.CpuNodeTemplate.AverageCpuUsed>`
                **presence node**\: True

            .. attribute:: no_processes

                Number of processes
                **type**\: :py:class:`NoProcesses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.CpuNode.CpuNodeTemplates.CpuNodeTemplate.NoProcesses>`
                **presence node**\: True

            .. attribute:: sample_interval

                Frequency of sampling in minutes
                **type**\: int
                **range:** 1..60
                **units**\: minute

            """

            _prefix = 'manageability-perfmgmt-cfg'
            _revision = '2017-09-07'

            def __init__(self):
                """Initialize a cpu-node-template list entry (keyed by template-name)."""
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(PerfMgmt.Threshold.CpuNode.CpuNodeTemplates.CpuNodeTemplate, self).__init__()

                # YANG schema metadata for this node.
                self.yang_name = "cpu-node-template"
                self.yang_parent_name = "cpu-node-templates"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = ['template_name']
                self._child_classes = OrderedDict([("average-cpu-used", ("average_cpu_used", PerfMgmt.Threshold.CpuNode.CpuNodeTemplates.CpuNodeTemplate.AverageCpuUsed)), ("no-processes", ("no_processes", PerfMgmt.Threshold.CpuNode.CpuNodeTemplates.CpuNodeTemplate.NoProcesses))])
                self._leafs = OrderedDict([
                    ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
                    ('sample_interval', (YLeaf(YType.uint32, 'sample-interval'), ['int'])),
                ])
                self.template_name = None
                self.sample_interval = None

                # Presence children start absent (None) and are created on demand.
                self.average_cpu_used = None
                self._children_name_map["average_cpu_used"] = "average-cpu-used"
                self.no_processes = None
                self._children_name_map["no_processes"] = "no-processes"
                # Segment path embeds the list key value.
                self._segment_path = lambda: "cpu-node-template" + "[template-name='" + str(self.template_name) + "']"
                self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/threshold/cpu-node/cpu-node-templates/%s" % self._segment_path()
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(PerfMgmt.Threshold.CpuNode.CpuNodeTemplates.CpuNodeTemplate, ['template_name', 'sample_interval'], name, value)


            class AverageCpuUsed(_Entity_):
                """
                Average %CPU utilization.

                .. attribute:: operator

                    Operator
                    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`

                .. attribute:: value

                    Threshold value (or start range value for operator RG)
                    **type**\: int
                    **range:** 0..100

                .. attribute:: end_range_value

                    Threshold end range value (for operator RG, set to 0 otherwise)
                    **type**\: int
                    **range:** 0..100

                .. attribute:: percent

                    Set to TRUE if Specified threshold values are in percent
                    **type**\: bool

                .. attribute:: rearm_type

                    Configure the Rearm type
                    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`

                .. attribute:: rearm_window

                    Configure the rearm window size (for rearm type Window)
                    **type**\: int
                    **range:** 1..100

                This class is a :ref:`presence class<presence-class>`

                """

                _prefix = 'manageability-perfmgmt-cfg'
                _revision = '2017-09-07'

                def __init__(self):
                    """Initialize the average-cpu-used presence container and its leafs."""
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(PerfMgmt.Threshold.CpuNode.CpuNodeTemplates.CpuNodeTemplate.AverageCpuUsed, self).__init__()

                    # YANG schema metadata for this node.
                    self.yang_name = "average-cpu-used"
                    self.yang_parent_name = "cpu-node-template"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])
                    # Presence container: its existence in the config itself carries meaning.
                    self.is_presence_container = True
                    self._leafs = OrderedDict([
                        ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
                        ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
                        ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
                        ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
                        ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
                        ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
                    ])
                    self.operator = None
                    self.value = None
                    self.end_range_value = None
                    self.percent = None
                    self.rearm_type = None
                    self.rearm_window = None
                    self._segment_path = lambda: "average-cpu-used"
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    self._perform_setattr(PerfMgmt.Threshold.CpuNode.CpuNodeTemplates.CpuNodeTemplate.AverageCpuUsed, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
                    return meta._meta_table['PerfMgmt.Threshold.CpuNode.CpuNodeTemplates.CpuNodeTemplate.AverageCpuUsed']['meta_info']


            class NoProcesses(_Entity_):
                """
                Number of processes.

                .. attribute:: operator

                    Operator
                    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`

                .. attribute:: value

                    Threshold value (or start range value for operator RG)
                    **type**\: int
                    **range:** 0..4294967295

                .. attribute:: end_range_value

                    Threshold end range value (for operator RG, set to 0 otherwise)
                    **type**\: int
                    **range:** 0..4294967295

                .. attribute:: percent

                    Set to TRUE if Specified threshold values are in percent
                    **type**\: bool

                .. attribute:: rearm_type

                    Configure the Rearm type
                    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`

                .. attribute:: rearm_window

                    Configure the rearm window size (for rearm type Window)
                    **type**\: int
                    **range:** 1..100

                This class is a :ref:`presence class<presence-class>`

                """

                _prefix = 'manageability-perfmgmt-cfg'
                _revision = '2017-09-07'

                def __init__(self):
                    """Initialize the no-processes presence container and its leafs."""
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(PerfMgmt.Threshold.CpuNode.CpuNodeTemplates.CpuNodeTemplate.NoProcesses, self).__init__()

                    # YANG schema metadata for this node.
                    self.yang_name = "no-processes"
                    self.yang_parent_name = "cpu-node-template"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])
                    # Presence container: its existence in the config itself carries meaning.
                    self.is_presence_container = True
                    self._leafs = OrderedDict([
                        ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
                        ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
                        ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
                        ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
                        ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
                        ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
                    ])
                    self.operator = None
                    self.value = None
                    self.end_range_value = None
                    self.percent = None
                    self.rearm_type = None
                    self.rearm_window = None
                    self._segment_path = lambda: "no-processes"
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    self._perform_setattr(PerfMgmt.Threshold.CpuNode.CpuNodeTemplates.CpuNodeTemplate.NoProcesses, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
                    return meta._meta_table['PerfMgmt.Threshold.CpuNode.CpuNodeTemplates.CpuNodeTemplate.NoProcesses']['meta_info']

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
                return meta._meta_table['PerfMgmt.Threshold.CpuNode.CpuNodeTemplates.CpuNodeTemplate']['meta_info']

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
            return meta._meta_table['PerfMgmt.Threshold.CpuNode.CpuNodeTemplates']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.CpuNode']['meta_info']
class DataRateInterface(_Entity_):
"""
Interface Data Rates threshold configuration
.. attribute:: data_rate_interface_templates
Interface Data Rates threshold templates
**type**\: :py:class:`DataRateInterfaceTemplates <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
    """Initialize the data-rate-interface container and its templates child."""
    # super() call form depends on the running Python major version.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PerfMgmt.Threshold.DataRateInterface, self).__init__()

    # YANG schema metadata for this node.
    self.yang_name = "data-rate-interface"
    self.yang_parent_name = "threshold"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("data-rate-interface-templates", ("data_rate_interface_templates", PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates))])
    self._leafs = OrderedDict()

    # Eagerly build the single child container.
    self.data_rate_interface_templates = PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates()
    self.data_rate_interface_templates.parent = self
    self._children_name_map["data_rate_interface_templates"] = "data-rate-interface-templates"
    self._segment_path = lambda: "data-rate-interface"
    self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/threshold/%s" % self._segment_path()
    # Freeze the entity: subsequent attribute writes are validated by __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route all attribute writes through YDK's validating setter (no leafs here)."""
    self._perform_setattr(
        PerfMgmt.Threshold.DataRateInterface,
        [],
        name,
        value,
    )
class DataRateInterfaceTemplates(_Entity_):
"""
Interface Data Rates threshold templates
.. attribute:: data_rate_interface_template
Interface Data Rates threshold template instance
**type**\: list of :py:class:`DataRateInterfaceTemplate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
    """Initialize the data-rate-interface-templates list container."""
    # super() call form depends on the running Python major version.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates, self).__init__()

    # YANG schema metadata for this node.
    self.yang_name = "data-rate-interface-templates"
    self.yang_parent_name = "data-rate-interface"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("data-rate-interface-template", ("data_rate_interface_template", PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate))])
    self._leafs = OrderedDict()

    # YANG list: holds DataRateInterfaceTemplate entries keyed by template-name.
    self.data_rate_interface_template = YList(self)
    self._segment_path = lambda: "data-rate-interface-templates"
    self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/threshold/data-rate-interface/%s" % self._segment_path()
    # Freeze the entity: subsequent attribute writes are validated by __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route all attribute writes through YDK's validating setter (no leafs here)."""
    self._perform_setattr(
        PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates,
        [],
        name,
        value,
    )
class DataRateInterfaceTemplate(_Entity_):
"""
Interface Data Rates threshold template
instance
.. attribute:: template_name (key)
Template Name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: input_data_rate
Input data rate in kbps
**type**\: :py:class:`InputDataRate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.InputDataRate>`
**presence node**\: True
.. attribute:: bandwidth
Bandwidth in kbps
**type**\: :py:class:`Bandwidth <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.Bandwidth>`
**presence node**\: True
.. attribute:: output_packet_rate
Number of Output packets per second
**type**\: :py:class:`OutputPacketRate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.OutputPacketRate>`
**presence node**\: True
.. attribute:: input_peak_pkts
Maximum number of input packets per second
**type**\: :py:class:`InputPeakPkts <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.InputPeakPkts>`
**presence node**\: True
.. attribute:: output_peak_rate
Peak output data rate in kbps
**type**\: :py:class:`OutputPeakRate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.OutputPeakRate>`
**presence node**\: True
.. attribute:: output_data_rate
Output data rate in kbps
**type**\: :py:class:`OutputDataRate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.OutputDataRate>`
**presence node**\: True
.. attribute:: input_packet_rate
Number of input packets per second
**type**\: :py:class:`InputPacketRate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.InputPacketRate>`
**presence node**\: True
.. attribute:: output_peak_pkts
Maximum number of output packets per second
**type**\: :py:class:`OutputPeakPkts <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.OutputPeakPkts>`
**presence node**\: True
.. attribute:: input_peak_rate
Peak input data rate in kbps
**type**\: :py:class:`InputPeakRate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.InputPeakRate>`
**presence node**\: True
.. attribute:: sample_interval
Frequency of sampling in minutes
**type**\: int
**range:** 1..60
**units**\: minute
.. attribute:: reg_exp_group
Enable instance filtering by regular expression
**type**\: str
**length:** 1..32
.. attribute:: vrf_group
Enable instance filtering by VRF name regular expression
**type**\: str
**length:** 1..32
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
    """Initialize a data-rate-interface-template list entry (keyed by template-name)."""
    # super() call form depends on the running Python major version.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate, self).__init__()

    # YANG schema metadata for this node.
    self.yang_name = "data-rate-interface-template"
    self.yang_parent_name = "data-rate-interface-templates"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = ['template_name']
    # YANG child name -> (Python attribute name, child class) for every threshold container.
    self._child_classes = OrderedDict([("input-data-rate", ("input_data_rate", PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.InputDataRate)), ("bandwidth", ("bandwidth", PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.Bandwidth)), ("output-packet-rate", ("output_packet_rate", PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.OutputPacketRate)), ("input-peak-pkts", ("input_peak_pkts", PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.InputPeakPkts)), ("output-peak-rate", ("output_peak_rate", PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.OutputPeakRate)), ("output-data-rate", ("output_data_rate", PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.OutputDataRate)), ("input-packet-rate", ("input_packet_rate", PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.InputPacketRate)), ("output-peak-pkts", ("output_peak_pkts", PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.OutputPeakPkts)), ("input-peak-rate", ("input_peak_rate", PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.InputPeakRate))])
    self._leafs = OrderedDict([
        ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
        ('sample_interval', (YLeaf(YType.uint32, 'sample-interval'), ['int'])),
        ('reg_exp_group', (YLeaf(YType.str, 'reg-exp-group'), ['str'])),
        ('vrf_group', (YLeaf(YType.str, 'vrf-group'), ['str'])),
    ])
    self.template_name = None
    self.sample_interval = None
    self.reg_exp_group = None
    self.vrf_group = None

    # Presence children start absent (None) and are created on demand.
    self.input_data_rate = None
    self._children_name_map["input_data_rate"] = "input-data-rate"
    self.bandwidth = None
    self._children_name_map["bandwidth"] = "bandwidth"
    self.output_packet_rate = None
    self._children_name_map["output_packet_rate"] = "output-packet-rate"
    self.input_peak_pkts = None
    self._children_name_map["input_peak_pkts"] = "input-peak-pkts"
    self.output_peak_rate = None
    self._children_name_map["output_peak_rate"] = "output-peak-rate"
    self.output_data_rate = None
    self._children_name_map["output_data_rate"] = "output-data-rate"
    self.input_packet_rate = None
    self._children_name_map["input_packet_rate"] = "input-packet-rate"
    self.output_peak_pkts = None
    self._children_name_map["output_peak_pkts"] = "output-peak-pkts"
    self.input_peak_rate = None
    self._children_name_map["input_peak_rate"] = "input-peak-rate"
    # Segment path embeds the list key value.
    self._segment_path = lambda: "data-rate-interface-template" + "[template-name='" + str(self.template_name) + "']"
    self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/threshold/data-rate-interface/data-rate-interface-templates/%s" % self._segment_path()
    # Freeze the entity: subsequent attribute writes are validated by __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route all attribute writes through YDK's validating setter."""
    leaf_names = ['template_name', 'sample_interval', 'reg_exp_group', 'vrf_group']
    self._perform_setattr(
        PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate,
        leaf_names,
        name,
        value,
    )
class InputDataRate(_Entity_):
    """
    Input data rate in kbps.

    .. attribute:: operator

        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`

    .. attribute:: value

        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295

    .. attribute:: end_range_value

        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295

    .. attribute:: percent

        Set to TRUE if Specified threshold values are in percent
        **type**\: bool

    .. attribute:: rearm_type

        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`

    .. attribute:: rearm_window

        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`

    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        """Initialize the input-data-rate presence container and its leafs."""
        # super() call form depends on the running Python major version.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.InputDataRate, self).__init__()

        # YANG schema metadata for this node.
        self.yang_name = "input-data-rate"
        self.yang_parent_name = "data-rate-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: its existence in the config itself carries meaning.
        self.is_presence_container = True
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "input-data-rate"
        # Freeze the entity: subsequent attribute writes are validated by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.InputDataRate, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.InputDataRate']['meta_info']
class Bandwidth(_Entity_):
    """
    Bandwidth in kbps.

    .. attribute:: operator

        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`

    .. attribute:: value

        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295

    .. attribute:: end_range_value

        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295

    .. attribute:: percent

        Set to TRUE if Specified threshold values are in percent
        **type**\: bool

    .. attribute:: rearm_type

        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`

    .. attribute:: rearm_window

        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`

    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        """Initialize the bandwidth presence container and its leafs."""
        # super() call form depends on the running Python major version.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.Bandwidth, self).__init__()

        # YANG schema metadata for this node.
        self.yang_name = "bandwidth"
        self.yang_parent_name = "data-rate-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: its existence in the config itself carries meaning.
        self.is_presence_container = True
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "bandwidth"
        # Freeze the entity: subsequent attribute writes are validated by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.Bandwidth, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.Bandwidth']['meta_info']
class OutputPacketRate(_Entity_):
    """
    Number of Output packets per second.

    .. attribute:: operator

        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`

    .. attribute:: value

        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295

    .. attribute:: end_range_value

        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295

    .. attribute:: percent

        Set to TRUE if Specified threshold values are in percent
        **type**\: bool

    .. attribute:: rearm_type

        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`

    .. attribute:: rearm_window

        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`

    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        """Initialize the output-packet-rate presence container and its leafs."""
        # super() call form depends on the running Python major version.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.OutputPacketRate, self).__init__()

        # YANG schema metadata for this node.
        self.yang_name = "output-packet-rate"
        self.yang_parent_name = "data-rate-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: its existence in the config itself carries meaning.
        self.is_presence_container = True
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "output-packet-rate"
        # Freeze the entity: subsequent attribute writes are validated by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.OutputPacketRate, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.OutputPacketRate']['meta_info']
class InputPeakPkts(_Entity_):
    """
    Maximum number of input packets per second.

    .. attribute:: operator

        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`

    .. attribute:: value

        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295

    .. attribute:: end_range_value

        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295

    .. attribute:: percent

        Set to TRUE if Specified threshold values are in percent
        **type**\: bool

    .. attribute:: rearm_type

        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`

    .. attribute:: rearm_window

        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`

    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        """Initialize the input-peak-pkts presence container and its leafs."""
        # super() call form depends on the running Python major version.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.InputPeakPkts, self).__init__()

        # YANG schema metadata for this node.
        self.yang_name = "input-peak-pkts"
        self.yang_parent_name = "data-rate-interface-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: its existence in the config itself carries meaning.
        self.is_presence_container = True
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "input-peak-pkts"
        # Freeze the entity: subsequent attribute writes are validated by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.InputPeakPkts, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.InputPeakPkts']['meta_info']
class OutputPeakRate(_Entity_):
    """
    Peak output data rate in kbps
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    This class is a :ref:`presence class<presence-class>`
    """

    _prefix = 'manageability-perfmgmt-cfg'  # YANG module prefix
    _revision = '2017-09-07'  # YANG module revision date

    def __init__(self):
        # Python 2/3 compatible super() invocation (zero-arg form requires Py3).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.OutputPeakRate, self).__init__()
        self.yang_name = "output-peak-rate"  # YANG node name of this container
        self.yang_parent_name = "data-rate-interface-template"  # YANG name of enclosing node
        self.is_top_level_class = False
        self.has_list_ancestor = True  # an ancestor is a keyed YANG list
        self.ylist_key_names = []  # not a list entry itself: no keys
        self._child_classes = OrderedDict([])  # leaf-only node, no child containers
        self.is_presence_container = True  # exists only when explicitly configured
        # Map of YANG leaf name -> (YLeaf type descriptor, accepted Python value types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # Leaf values are unset until assigned by the user.
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "output-peak-rate"
        # Set last: assignments above go through __setattr__ -> _perform_setattr;
        # freezing semantics live in _Entity_ (not visible here).
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate all attribute writes to _Entity_'s hook with the writable leaf names.
        self._perform_setattr(PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.OutputPeakRate, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Deferred import — presumably avoids a circular dependency with _meta; verify.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.OutputPeakRate']['meta_info']
class OutputDataRate(_Entity_):
    """
    Output data rate in kbps
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    This class is a :ref:`presence class<presence-class>`
    """

    _prefix = 'manageability-perfmgmt-cfg'  # YANG module prefix
    _revision = '2017-09-07'  # YANG module revision date

    def __init__(self):
        # Python 2/3 compatible super() invocation (zero-arg form requires Py3).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.OutputDataRate, self).__init__()
        self.yang_name = "output-data-rate"  # YANG node name of this container
        self.yang_parent_name = "data-rate-interface-template"  # YANG name of enclosing node
        self.is_top_level_class = False
        self.has_list_ancestor = True  # an ancestor is a keyed YANG list
        self.ylist_key_names = []  # not a list entry itself: no keys
        self._child_classes = OrderedDict([])  # leaf-only node, no child containers
        self.is_presence_container = True  # exists only when explicitly configured
        # Map of YANG leaf name -> (YLeaf type descriptor, accepted Python value types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # Leaf values are unset until assigned by the user.
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "output-data-rate"
        # Set last: assignments above go through __setattr__ -> _perform_setattr;
        # freezing semantics live in _Entity_ (not visible here).
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate all attribute writes to _Entity_'s hook with the writable leaf names.
        self._perform_setattr(PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.OutputDataRate, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Deferred import — presumably avoids a circular dependency with _meta; verify.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.OutputDataRate']['meta_info']
class InputPacketRate(_Entity_):
    """
    Number of input packets per second
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    This class is a :ref:`presence class<presence-class>`
    """

    _prefix = 'manageability-perfmgmt-cfg'  # YANG module prefix
    _revision = '2017-09-07'  # YANG module revision date

    def __init__(self):
        # Python 2/3 compatible super() invocation (zero-arg form requires Py3).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.InputPacketRate, self).__init__()
        self.yang_name = "input-packet-rate"  # YANG node name of this container
        self.yang_parent_name = "data-rate-interface-template"  # YANG name of enclosing node
        self.is_top_level_class = False
        self.has_list_ancestor = True  # an ancestor is a keyed YANG list
        self.ylist_key_names = []  # not a list entry itself: no keys
        self._child_classes = OrderedDict([])  # leaf-only node, no child containers
        self.is_presence_container = True  # exists only when explicitly configured
        # Map of YANG leaf name -> (YLeaf type descriptor, accepted Python value types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # Leaf values are unset until assigned by the user.
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "input-packet-rate"
        # Set last: assignments above go through __setattr__ -> _perform_setattr;
        # freezing semantics live in _Entity_ (not visible here).
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate all attribute writes to _Entity_'s hook with the writable leaf names.
        self._perform_setattr(PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.InputPacketRate, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Deferred import — presumably avoids a circular dependency with _meta; verify.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.InputPacketRate']['meta_info']
class OutputPeakPkts(_Entity_):
    """
    Maximum number of output packets per second
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    This class is a :ref:`presence class<presence-class>`
    """

    _prefix = 'manageability-perfmgmt-cfg'  # YANG module prefix
    _revision = '2017-09-07'  # YANG module revision date

    def __init__(self):
        # Python 2/3 compatible super() invocation (zero-arg form requires Py3).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.OutputPeakPkts, self).__init__()
        self.yang_name = "output-peak-pkts"  # YANG node name of this container
        self.yang_parent_name = "data-rate-interface-template"  # YANG name of enclosing node
        self.is_top_level_class = False
        self.has_list_ancestor = True  # an ancestor is a keyed YANG list
        self.ylist_key_names = []  # not a list entry itself: no keys
        self._child_classes = OrderedDict([])  # leaf-only node, no child containers
        self.is_presence_container = True  # exists only when explicitly configured
        # Map of YANG leaf name -> (YLeaf type descriptor, accepted Python value types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # Leaf values are unset until assigned by the user.
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "output-peak-pkts"
        # Set last: assignments above go through __setattr__ -> _perform_setattr;
        # freezing semantics live in _Entity_ (not visible here).
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate all attribute writes to _Entity_'s hook with the writable leaf names.
        self._perform_setattr(PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.OutputPeakPkts, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Deferred import — presumably avoids a circular dependency with _meta; verify.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.OutputPeakPkts']['meta_info']
class InputPeakRate(_Entity_):
    """
    Peak input data rate in kbps
    .. attribute:: operator
    Operator
    **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
    Threshold value (or start range value for operator RG)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: end_range_value
    Threshold end range value (for operator RG, set to 0 otherwise)
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: percent
    Set to TRUE if Specified threshold values are in percent
    **type**\: bool
    .. attribute:: rearm_type
    Configure the Rearm type
    **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
    Configure the rearm window size (for rearm type Window)
    **type**\: int
    **range:** 1..100
    This class is a :ref:`presence class<presence-class>`
    """

    _prefix = 'manageability-perfmgmt-cfg'  # YANG module prefix
    _revision = '2017-09-07'  # YANG module revision date

    def __init__(self):
        # Python 2/3 compatible super() invocation (zero-arg form requires Py3).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.InputPeakRate, self).__init__()
        self.yang_name = "input-peak-rate"  # YANG node name of this container
        self.yang_parent_name = "data-rate-interface-template"  # YANG name of enclosing node
        self.is_top_level_class = False
        self.has_list_ancestor = True  # an ancestor is a keyed YANG list
        self.ylist_key_names = []  # not a list entry itself: no keys
        self._child_classes = OrderedDict([])  # leaf-only node, no child containers
        self.is_presence_container = True  # exists only when explicitly configured
        # Map of YANG leaf name -> (YLeaf type descriptor, accepted Python value types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # Leaf values are unset until assigned by the user.
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "input-peak-rate"
        # Set last: assignments above go through __setattr__ -> _perform_setattr;
        # freezing semantics live in _Entity_ (not visible here).
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate all attribute writes to _Entity_'s hook with the writable leaf names.
        self._perform_setattr(PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.InputPeakRate, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Deferred import — presumably avoids a circular dependency with _meta; verify.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate.InputPeakRate']['meta_info']
@staticmethod
def _meta_info():
    # Metadata accessor for the enclosing DataRateInterfaceTemplate class.
    # Deferred import — presumably avoids a circular dependency with _meta; verify.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
    return meta._meta_table['PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates.DataRateInterfaceTemplate']['meta_info']
@staticmethod
def _meta_info():
    # Metadata accessor for the enclosing DataRateInterfaceTemplates class.
    # Deferred import — presumably avoids a circular dependency with _meta; verify.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
    return meta._meta_table['PerfMgmt.Threshold.DataRateInterface.DataRateInterfaceTemplates']['meta_info']
@staticmethod
def _meta_info():
    # Metadata accessor for the enclosing DataRateInterface class.
    # Deferred import — presumably avoids a circular dependency with _meta; verify.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
    return meta._meta_table['PerfMgmt.Threshold.DataRateInterface']['meta_info']
class ProcessNode(_Entity_):
    """
    Node Process threshold configuration
    .. attribute:: process_node_templates
    Node Memory threshold templates
    **type**\: :py:class:`ProcessNodeTemplates <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.ProcessNode.ProcessNodeTemplates>`
    """

    _prefix = 'manageability-perfmgmt-cfg'  # YANG module prefix
    _revision = '2017-09-07'  # YANG module revision date

    def __init__(self):
        # Python 2/3 compatible super() invocation (zero-arg form requires Py3).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.ProcessNode, self).__init__()
        self.yang_name = "process-node"  # YANG node name of this container
        self.yang_parent_name = "threshold"  # YANG name of enclosing node
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # Single child container: process-node-templates.
        self._child_classes = OrderedDict([("process-node-templates", ("process_node_templates", PerfMgmt.Threshold.ProcessNode.ProcessNodeTemplates))])
        self._leafs = OrderedDict()  # no leaves at this level
        # Child is instantiated eagerly and parented to this node.
        self.process_node_templates = PerfMgmt.Threshold.ProcessNode.ProcessNodeTemplates()
        self.process_node_templates.parent = self
        self._children_name_map["process_node_templates"] = "process-node-templates"
        self._segment_path = lambda: "process-node"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/threshold/%s" % self._segment_path()
        # Set last: assignments above go through __setattr__ -> _perform_setattr;
        # freezing semantics live in _Entity_ (not visible here).
        self._is_frozen = True

    def __setattr__(self, name, value):
        # No writable leaves at this level (empty leaf-name list).
        self._perform_setattr(PerfMgmt.Threshold.ProcessNode, [], name, value)

    class ProcessNodeTemplates(_Entity_):
        """
        Node Memory threshold templates
        .. attribute:: process_node_template
        Node Memory threshold template instance
        **type**\: list of :py:class:`ProcessNodeTemplate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.ProcessNode.ProcessNodeTemplates.ProcessNodeTemplate>`
        """

        _prefix = 'manageability-perfmgmt-cfg'
        _revision = '2017-09-07'

        def __init__(self):
            # Python 2/3 compatible super() invocation.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PerfMgmt.Threshold.ProcessNode.ProcessNodeTemplates, self).__init__()
            self.yang_name = "process-node-templates"
            self.yang_parent_name = "process-node"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("process-node-template", ("process_node_template", PerfMgmt.Threshold.ProcessNode.ProcessNodeTemplates.ProcessNodeTemplate))])
            self._leafs = OrderedDict()  # no leaves at this level
            # YANG list of template entries, backed by YDK's YList.
            self.process_node_template = YList(self)
            self._segment_path = lambda: "process-node-templates"
            self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/threshold/process-node/%s" % self._segment_path()
            self._is_frozen = True  # set last; see class-level setattr hook

        def __setattr__(self, name, value):
            # No writable leaves at this level.
            self._perform_setattr(PerfMgmt.Threshold.ProcessNode.ProcessNodeTemplates, [], name, value)

        class ProcessNodeTemplate(_Entity_):
            """
            Node Memory threshold template instance
            .. attribute:: template_name (key)
            Template Name
            **type**\: str
            **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
            .. attribute:: average_cpu_used
            Average %CPU utilization
            **type**\: :py:class:`AverageCpuUsed <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.ProcessNode.ProcessNodeTemplates.ProcessNodeTemplate.AverageCpuUsed>`
            **presence node**\: True
            .. attribute:: peak_memory
            Max memory (KBytes) used since startup time
            **type**\: :py:class:`PeakMemory <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.ProcessNode.ProcessNodeTemplates.ProcessNodeTemplate.PeakMemory>`
            **presence node**\: True
            .. attribute:: no_threads
            Number of threads
            **type**\: :py:class:`NoThreads <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.ProcessNode.ProcessNodeTemplates.ProcessNodeTemplate.NoThreads>`
            **presence node**\: True
            .. attribute:: sample_interval
            Frequency of sampling in minutes
            **type**\: int
            **range:** 1..60
            **units**\: minute
            """

            _prefix = 'manageability-perfmgmt-cfg'
            _revision = '2017-09-07'

            def __init__(self):
                # Python 2/3 compatible super() invocation.
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(PerfMgmt.Threshold.ProcessNode.ProcessNodeTemplates.ProcessNodeTemplate, self).__init__()
                self.yang_name = "process-node-template"
                self.yang_parent_name = "process-node-templates"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = ['template_name']  # YANG list key
                self._child_classes = OrderedDict([("average-cpu-used", ("average_cpu_used", PerfMgmt.Threshold.ProcessNode.ProcessNodeTemplates.ProcessNodeTemplate.AverageCpuUsed)), ("peak-memory", ("peak_memory", PerfMgmt.Threshold.ProcessNode.ProcessNodeTemplates.ProcessNodeTemplate.PeakMemory)), ("no-threads", ("no_threads", PerfMgmt.Threshold.ProcessNode.ProcessNodeTemplates.ProcessNodeTemplate.NoThreads))])
                self._leafs = OrderedDict([
                    ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
                    ('sample_interval', (YLeaf(YType.uint32, 'sample-interval'), ['int'])),
                ])
                self.template_name = None
                self.sample_interval = None
                # Presence-node children are None until explicitly created by the user.
                self.average_cpu_used = None
                self._children_name_map["average_cpu_used"] = "average-cpu-used"
                self.peak_memory = None
                self._children_name_map["peak_memory"] = "peak-memory"
                self.no_threads = None
                self._children_name_map["no_threads"] = "no-threads"
                # Keyed list entry: path segment embeds the template-name key predicate.
                self._segment_path = lambda: "process-node-template" + "[template-name='" + str(self.template_name) + "']"
                self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/threshold/process-node/process-node-templates/%s" % self._segment_path()
                self._is_frozen = True  # set last; see setattr hook

            def __setattr__(self, name, value):
                # Writable leaves: the list key plus sample_interval.
                self._perform_setattr(PerfMgmt.Threshold.ProcessNode.ProcessNodeTemplates.ProcessNodeTemplate, ['template_name', 'sample_interval'], name, value)

            class AverageCpuUsed(_Entity_):
                """
                Average %CPU utilization
                .. attribute:: operator
                Operator
                **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
                .. attribute:: value
                Threshold value (or start range value for operator RG)
                **type**\: int
                **range:** 0..100
                .. attribute:: end_range_value
                Threshold end range value (for operator RG, set to 0 otherwise)
                **type**\: int
                **range:** 0..100
                .. attribute:: percent
                Set to TRUE if Specified threshold values are in percent
                **type**\: bool
                .. attribute:: rearm_type
                Configure the Rearm type
                **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
                .. attribute:: rearm_window
                Configure the rearm window size (for rearm type Window)
                **type**\: int
                **range:** 1..100
                This class is a :ref:`presence class<presence-class>`
                """

                _prefix = 'manageability-perfmgmt-cfg'
                _revision = '2017-09-07'

                def __init__(self):
                    # Python 2/3 compatible super() invocation.
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(PerfMgmt.Threshold.ProcessNode.ProcessNodeTemplates.ProcessNodeTemplate.AverageCpuUsed, self).__init__()
                    self.yang_name = "average-cpu-used"
                    self.yang_parent_name = "process-node-template"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])  # leaf-only node
                    self.is_presence_container = True  # exists only when explicitly configured
                    # YANG leaf name -> (YLeaf descriptor, accepted Python types).
                    self._leafs = OrderedDict([
                        ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
                        ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
                        ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
                        ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
                        ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
                        ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
                    ])
                    self.operator = None
                    self.value = None
                    self.end_range_value = None
                    self.percent = None
                    self.rearm_type = None
                    self.rearm_window = None
                    self._segment_path = lambda: "average-cpu-used"
                    self._is_frozen = True  # set last; see setattr hook

                def __setattr__(self, name, value):
                    self._perform_setattr(PerfMgmt.Threshold.ProcessNode.ProcessNodeTemplates.ProcessNodeTemplate.AverageCpuUsed, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

                @staticmethod
                def _meta_info():
                    # Deferred import — presumably avoids a circular dependency with _meta; verify.
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
                    return meta._meta_table['PerfMgmt.Threshold.ProcessNode.ProcessNodeTemplates.ProcessNodeTemplate.AverageCpuUsed']['meta_info']

            class PeakMemory(_Entity_):
                """
                Max memory (KBytes) used since startup time
                .. attribute:: operator
                Operator
                **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
                .. attribute:: value
                Threshold value (or start range value for operator RG)
                **type**\: int
                **range:** 0..4294967295
                .. attribute:: end_range_value
                Threshold end range value (for operator RG, set to 0 otherwise)
                **type**\: int
                **range:** 0..4294967295
                .. attribute:: percent
                Set to TRUE if Specified threshold values are in percent
                **type**\: bool
                .. attribute:: rearm_type
                Configure the Rearm type
                **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
                .. attribute:: rearm_window
                Configure the rearm window size (for rearm type Window)
                **type**\: int
                **range:** 1..100
                This class is a :ref:`presence class<presence-class>`
                """

                _prefix = 'manageability-perfmgmt-cfg'
                _revision = '2017-09-07'

                def __init__(self):
                    # Python 2/3 compatible super() invocation.
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(PerfMgmt.Threshold.ProcessNode.ProcessNodeTemplates.ProcessNodeTemplate.PeakMemory, self).__init__()
                    self.yang_name = "peak-memory"
                    self.yang_parent_name = "process-node-template"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])  # leaf-only node
                    self.is_presence_container = True  # exists only when explicitly configured
                    # YANG leaf name -> (YLeaf descriptor, accepted Python types).
                    self._leafs = OrderedDict([
                        ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
                        ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
                        ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
                        ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
                        ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
                        ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
                    ])
                    self.operator = None
                    self.value = None
                    self.end_range_value = None
                    self.percent = None
                    self.rearm_type = None
                    self.rearm_window = None
                    self._segment_path = lambda: "peak-memory"
                    self._is_frozen = True  # set last; see setattr hook

                def __setattr__(self, name, value):
                    self._perform_setattr(PerfMgmt.Threshold.ProcessNode.ProcessNodeTemplates.ProcessNodeTemplate.PeakMemory, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

                @staticmethod
                def _meta_info():
                    # Deferred import — presumably avoids a circular dependency with _meta; verify.
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
                    return meta._meta_table['PerfMgmt.Threshold.ProcessNode.ProcessNodeTemplates.ProcessNodeTemplate.PeakMemory']['meta_info']

            class NoThreads(_Entity_):
                """
                Number of threads
                .. attribute:: operator
                Operator
                **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
                .. attribute:: value
                Threshold value (or start range value for operator RG)
                **type**\: int
                **range:** 0..32767
                .. attribute:: end_range_value
                Threshold end range value (for operator RG, set to 0 otherwise)
                **type**\: int
                **range:** 0..32767
                .. attribute:: percent
                Set to TRUE if Specified threshold values are in percent
                **type**\: bool
                .. attribute:: rearm_type
                Configure the Rearm type
                **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
                .. attribute:: rearm_window
                Configure the rearm window size (for rearm type Window)
                **type**\: int
                **range:** 1..100
                This class is a :ref:`presence class<presence-class>`
                """

                _prefix = 'manageability-perfmgmt-cfg'
                _revision = '2017-09-07'

                def __init__(self):
                    # Python 2/3 compatible super() invocation.
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(PerfMgmt.Threshold.ProcessNode.ProcessNodeTemplates.ProcessNodeTemplate.NoThreads, self).__init__()
                    self.yang_name = "no-threads"
                    self.yang_parent_name = "process-node-template"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])  # leaf-only node
                    self.is_presence_container = True  # exists only when explicitly configured
                    # YANG leaf name -> (YLeaf descriptor, accepted Python types).
                    self._leafs = OrderedDict([
                        ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
                        ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
                        ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
                        ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
                        ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
                        ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
                    ])
                    self.operator = None
                    self.value = None
                    self.end_range_value = None
                    self.percent = None
                    self.rearm_type = None
                    self.rearm_window = None
                    self._segment_path = lambda: "no-threads"
                    self._is_frozen = True  # set last; see setattr hook

                def __setattr__(self, name, value):
                    self._perform_setattr(PerfMgmt.Threshold.ProcessNode.ProcessNodeTemplates.ProcessNodeTemplate.NoThreads, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

                @staticmethod
                def _meta_info():
                    # Deferred import — presumably avoids a circular dependency with _meta; verify.
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
                    return meta._meta_table['PerfMgmt.Threshold.ProcessNode.ProcessNodeTemplates.ProcessNodeTemplate.NoThreads']['meta_info']

            @staticmethod
            def _meta_info():
                # Metadata accessor for ProcessNodeTemplate.
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
                return meta._meta_table['PerfMgmt.Threshold.ProcessNode.ProcessNodeTemplates.ProcessNodeTemplate']['meta_info']

        @staticmethod
        def _meta_info():
            # Metadata accessor for ProcessNodeTemplates.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
            return meta._meta_table['PerfMgmt.Threshold.ProcessNode.ProcessNodeTemplates']['meta_info']

    @staticmethod
    def _meta_info():
        # Metadata accessor for ProcessNode.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.ProcessNode']['meta_info']
class MemoryNode(_Entity_):
"""
Node Memory threshold configuration
.. attribute:: memory_node_templates
Node Memory threshold configuration templates
**type**\: :py:class:`MemoryNodeTemplates <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.MemoryNode.MemoryNodeTemplates>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
    # Python 2/3 compatible super() invocation (zero-arg form requires Py3).
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PerfMgmt.Threshold.MemoryNode, self).__init__()
    self.yang_name = "memory-node"  # YANG node name of this container
    self.yang_parent_name = "threshold"  # YANG name of enclosing node
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # Single child container: memory-node-templates.
    self._child_classes = OrderedDict([("memory-node-templates", ("memory_node_templates", PerfMgmt.Threshold.MemoryNode.MemoryNodeTemplates))])
    self._leafs = OrderedDict()  # no leaves at this level
    # Child is instantiated eagerly and parented to this node.
    self.memory_node_templates = PerfMgmt.Threshold.MemoryNode.MemoryNodeTemplates()
    self.memory_node_templates.parent = self
    self._children_name_map["memory_node_templates"] = "memory-node-templates"
    self._segment_path = lambda: "memory-node"
    self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/threshold/%s" % self._segment_path()
    # Set last: assignments above go through __setattr__ -> _perform_setattr;
    # freezing semantics live in _Entity_ (not visible here).
    self._is_frozen = True
def __setattr__(self, name, value):
    # No writable leaves at this level (empty leaf-name list).
    self._perform_setattr(PerfMgmt.Threshold.MemoryNode, [], name, value)
class MemoryNodeTemplates(_Entity_):
"""
Node Memory threshold configuration templates
.. attribute:: memory_node_template
Node Memory threshold configuration template instance
**type**\: list of :py:class:`MemoryNodeTemplate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.MemoryNode.MemoryNodeTemplates.MemoryNodeTemplate>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
    # Python 2/3 compatible super() invocation (zero-arg form requires Py3).
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PerfMgmt.Threshold.MemoryNode.MemoryNodeTemplates, self).__init__()
    self.yang_name = "memory-node-templates"  # YANG node name of this container
    self.yang_parent_name = "memory-node"  # YANG name of enclosing node
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("memory-node-template", ("memory_node_template", PerfMgmt.Threshold.MemoryNode.MemoryNodeTemplates.MemoryNodeTemplate))])
    self._leafs = OrderedDict()  # no leaves at this level
    # YANG list of template entries, backed by YDK's YList.
    self.memory_node_template = YList(self)
    self._segment_path = lambda: "memory-node-templates"
    self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/threshold/memory-node/%s" % self._segment_path()
    # Set last; see the __setattr__ hook below.
    self._is_frozen = True
def __setattr__(self, name, value):
    # No writable leaves at this level (empty leaf-name list).
    self._perform_setattr(PerfMgmt.Threshold.MemoryNode.MemoryNodeTemplates, [], name, value)
class MemoryNodeTemplate(_Entity_):
    """
    Node Memory threshold configuration template
    instance

    .. attribute:: template_name  (key)

        Template Name
        **type**\: str
        **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+

    .. attribute:: peak_memory

        Maximum memory (KBytes) used
        **type**\: :py:class:`PeakMemory <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.MemoryNode.MemoryNodeTemplates.MemoryNodeTemplate.PeakMemory>`
        **presence node**\: True

    .. attribute:: curr_memory

        Current memory (Bytes) in use
        **type**\: :py:class:`CurrMemory <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.MemoryNode.MemoryNodeTemplates.MemoryNodeTemplate.CurrMemory>`
        **presence node**\: True

    .. attribute:: sample_interval

        Frequency of sampling in minutes
        **type**\: int
        **range:** 1..60
        **units**\: minute

    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Py2/Py3 compatible constructor chaining.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.MemoryNode.MemoryNodeTemplates.MemoryNodeTemplate, self).__init__()

        self.yang_name = "memory-node-template"
        self.yang_parent_name = "memory-node-templates"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = ['template_name']  # YANG list key(s)
        # Child presence containers: yang-name -> (python-name, class).
        self._child_classes = OrderedDict([("peak-memory", ("peak_memory", PerfMgmt.Threshold.MemoryNode.MemoryNodeTemplates.MemoryNodeTemplate.PeakMemory)), ("curr-memory", ("curr_memory", PerfMgmt.Threshold.MemoryNode.MemoryNodeTemplates.MemoryNodeTemplate.CurrMemory))])
        # Leaf descriptors: python-name -> (YLeaf, accepted python types).
        self._leafs = OrderedDict([
            ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
            ('sample_interval', (YLeaf(YType.uint32, 'sample-interval'), ['int'])),
        ])
        self.template_name = None
        self.sample_interval = None

        # Presence containers default to absent (None) until the caller assigns them.
        self.peak_memory = None
        self._children_name_map["peak_memory"] = "peak-memory"
        self.curr_memory = None
        self._children_name_map["curr_memory"] = "curr-memory"
        # The XPath segment embeds the list-key predicate.
        self._segment_path = lambda: "memory-node-template" + "[template-name='" + str(self.template_name) + "']"
        self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/threshold/memory-node/memory-node-templates/%s" % self._segment_path()
        self._is_frozen = True  # set last — generator convention

    def __setattr__(self, name, value):
        """Route writes through YDK's validating setter; only the listed names are data leaves."""
        self._perform_setattr(PerfMgmt.Threshold.MemoryNode.MemoryNodeTemplates.MemoryNodeTemplate, ['template_name', 'sample_interval'], name, value)

    class PeakMemory(_Entity_):
        """
        Maximum memory (KBytes) used

        .. attribute:: operator

            Operator
            **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`

        .. attribute:: value

            Threshold value (or start range value for operator RG)
            **type**\: int
            **range:** 0..4194304

        .. attribute:: end_range_value

            Threshold end range value (for operator RG, set to 0 otherwise)
            **type**\: int
            **range:** 0..4194304

        .. attribute:: percent

            Set to TRUE if Specified threshold values are in percent
            **type**\: bool

        .. attribute:: rearm_type

            Configure the Rearm type
            **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`

        .. attribute:: rearm_window

            Configure the rearm window size (for rearm type Window)
            **type**\: int
            **range:** 1..100

        This class is a :ref:`presence class<presence-class>`

        """

        _prefix = 'manageability-perfmgmt-cfg'
        _revision = '2017-09-07'

        def __init__(self):
            # Py2/Py3 compatible constructor chaining.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PerfMgmt.Threshold.MemoryNode.MemoryNodeTemplates.MemoryNodeTemplate.PeakMemory, self).__init__()

            self.yang_name = "peak-memory"
            self.yang_parent_name = "memory-node-template"
            self.is_top_level_class = False
            self.has_list_ancestor = True  # path depends on the enclosing template's key
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])  # leaf-only node
            self.is_presence_container = True  # YANG presence container
            self._leafs = OrderedDict([
                ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
                ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
                ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
                ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
                ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
                ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
            ])
            self.operator = None
            self.value = None
            self.end_range_value = None
            self.percent = None
            self.rearm_type = None
            self.rearm_window = None
            self._segment_path = lambda: "peak-memory"
            # No _absolute_path here — presumably resolved through the parent list entry at runtime.
            self._is_frozen = True  # set last — generator convention

        def __setattr__(self, name, value):
            """Route writes through YDK's validating setter for the listed leaves."""
            self._perform_setattr(PerfMgmt.Threshold.MemoryNode.MemoryNodeTemplates.MemoryNodeTemplate.PeakMemory, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

        @staticmethod
        def _meta_info():
            # Lazy import, as throughout this generated file.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
            return meta._meta_table['PerfMgmt.Threshold.MemoryNode.MemoryNodeTemplates.MemoryNodeTemplate.PeakMemory']['meta_info']

    class CurrMemory(_Entity_):
        """
        Current memory (Bytes) in use

        .. attribute:: operator

            Operator
            **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`

        .. attribute:: value

            Threshold value (or start range value for operator RG)
            **type**\: int
            **range:** 0..4294967295

        .. attribute:: end_range_value

            Threshold end range value (for operator RG, set to 0 otherwise)
            **type**\: int
            **range:** 0..4294967295

        .. attribute:: percent

            Set to TRUE if Specified threshold values are in percent
            **type**\: bool

        .. attribute:: rearm_type

            Configure the Rearm type
            **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`

        .. attribute:: rearm_window

            Configure the rearm window size (for rearm type Window)
            **type**\: int
            **range:** 1..100

        This class is a :ref:`presence class<presence-class>`

        """

        _prefix = 'manageability-perfmgmt-cfg'
        _revision = '2017-09-07'

        def __init__(self):
            # Py2/Py3 compatible constructor chaining.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PerfMgmt.Threshold.MemoryNode.MemoryNodeTemplates.MemoryNodeTemplate.CurrMemory, self).__init__()

            self.yang_name = "curr-memory"
            self.yang_parent_name = "memory-node-template"
            self.is_top_level_class = False
            self.has_list_ancestor = True  # path depends on the enclosing template's key
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])  # leaf-only node
            self.is_presence_container = True  # YANG presence container
            self._leafs = OrderedDict([
                ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
                ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
                ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
                ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
                ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
                ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
            ])
            self.operator = None
            self.value = None
            self.end_range_value = None
            self.percent = None
            self.rearm_type = None
            self.rearm_window = None
            self._segment_path = lambda: "curr-memory"
            # No _absolute_path here — presumably resolved through the parent list entry at runtime.
            self._is_frozen = True  # set last — generator convention

        def __setattr__(self, name, value):
            """Route writes through YDK's validating setter for the listed leaves."""
            self._perform_setattr(PerfMgmt.Threshold.MemoryNode.MemoryNodeTemplates.MemoryNodeTemplate.CurrMemory, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

        @staticmethod
        def _meta_info():
            # Lazy import, as throughout this generated file.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
            return meta._meta_table['PerfMgmt.Threshold.MemoryNode.MemoryNodeTemplates.MemoryNodeTemplate.CurrMemory']['meta_info']

    @staticmethod
    def _meta_info():
        # Lazy import, as throughout this generated file.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.MemoryNode.MemoryNodeTemplates.MemoryNodeTemplate']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta information entry for MemoryNodeTemplates.

    The _meta module is imported lazily, as everywhere else in this
    generated file.
    """
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta_module
    table_entry = meta_module._meta_table['PerfMgmt.Threshold.MemoryNode.MemoryNodeTemplates']
    return table_entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta information entry for MemoryNode.

    The _meta module is imported lazily, as everywhere else in this
    generated file.
    """
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta_module
    table_entry = meta_module._meta_table['PerfMgmt.Threshold.MemoryNode']
    return table_entry['meta_info']
class Ospfv3Protocol(_Entity_):
"""
OSPF v3 Protocol threshold configuration
.. attribute:: ospfv3_protocol_templates
OSPF v3 Protocol threshold templates
**type**\: :py:class:`Ospfv3ProtocolTemplates <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
    """Build the generated YDK entity for the ospfv3-protocol container."""
    # Py2/Py3 compatible constructor chaining.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PerfMgmt.Threshold.Ospfv3Protocol, self).__init__()

    # YANG identity of this node and of its parent container.
    self.yang_name = "ospfv3-protocol"
    self.yang_parent_name = "threshold"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # Child container: yang-name -> (python-name, class).
    self._child_classes = OrderedDict([("ospfv3-protocol-templates", ("ospfv3_protocol_templates", PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates))])
    self._leafs = OrderedDict()  # container has no direct leaves

    # Non-presence child container is instantiated eagerly and parented here.
    self.ospfv3_protocol_templates = PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates()
    self.ospfv3_protocol_templates.parent = self
    self._children_name_map["ospfv3_protocol_templates"] = "ospfv3-protocol-templates"
    self._segment_path = lambda: "ospfv3-protocol"
    self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/threshold/%s" % self._segment_path()
    self._is_frozen = True  # set last — generator convention
def __setattr__(self, name, value):
    """Route all attribute writes through YDK's validating setter (no data leaves on this container)."""
    self._perform_setattr(PerfMgmt.Threshold.Ospfv3Protocol, [], name, value)
class Ospfv3ProtocolTemplates(_Entity_):
"""
OSPF v3 Protocol threshold templates
.. attribute:: ospfv3_protocol_template
OSPF v3 Protocol threshold template instance
**type**\: list of :py:class:`Ospfv3ProtocolTemplate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate>`
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
    """Build the generated YDK entity for the ospfv3-protocol-templates container."""
    # Py2/Py3 compatible constructor chaining.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates, self).__init__()

    # YANG identity of this node and of its parent container.
    self.yang_name = "ospfv3-protocol-templates"
    self.yang_parent_name = "ospfv3-protocol"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # Child YANG list: yang-name -> (python-name, class).
    self._child_classes = OrderedDict([("ospfv3-protocol-template", ("ospfv3_protocol_template", PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate))])
    self._leafs = OrderedDict()  # container has no direct leaves

    # Keyed list of template entries.
    self.ospfv3_protocol_template = YList(self)
    self._segment_path = lambda: "ospfv3-protocol-templates"
    self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/threshold/ospfv3-protocol/%s" % self._segment_path()
    self._is_frozen = True  # set last — generator convention
def __setattr__(self, name, value):
    """Route all attribute writes through YDK's validating setter (no data leaves on this container)."""
    self._perform_setattr(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates, [], name, value)
class Ospfv3ProtocolTemplate(_Entity_):
"""
OSPF v3 Protocol threshold template instance
.. attribute:: template_name (key)
Template Name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: input_lsa_acks_lsa
Number of LSA received in LSA Acknowledgements
**type**\: :py:class:`InputLsaAcksLsa <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputLsaAcksLsa>`
**presence node**\: True
.. attribute:: output_db_ds_lsa
Number of LSA sent in DBD packets
**type**\: :py:class:`OutputDbDsLsa <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputDbDsLsa>`
**presence node**\: True
.. attribute:: input_db_ds_lsa
Number of LSA received in DBD packets
**type**\: :py:class:`InputDbDsLsa <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputDbDsLsa>`
**presence node**\: True
.. attribute:: input_lsa_updates
Number of LSA Updates received
**type**\: :py:class:`InputLsaUpdates <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputLsaUpdates>`
**presence node**\: True
.. attribute:: output_db_ds
Number of DBD packets sent
**type**\: :py:class:`OutputDbDs <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputDbDs>`
**presence node**\: True
.. attribute:: output_lsa_updates_lsa
Number of LSA sent in LSA Updates
**type**\: :py:class:`OutputLsaUpdatesLsa <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputLsaUpdatesLsa>`
**presence node**\: True
.. attribute:: input_db_ds
Number of DBD packets received
**type**\: :py:class:`InputDbDs <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputDbDs>`
**presence node**\: True
.. attribute:: input_lsa_updates_lsa
Number of LSA received in LSA Updates
**type**\: :py:class:`InputLsaUpdatesLsa <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputLsaUpdatesLsa>`
**presence node**\: True
.. attribute:: sample_interval
Frequency of sampling in minutes
**type**\: int
**range:** 1..60
**units**\: minute
.. attribute:: output_packets
Total number of packets sent
**type**\: :py:class:`OutputPackets <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputPackets>`
**presence node**\: True
.. attribute:: input_packets
Total number of packets received
**type**\: :py:class:`InputPackets <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputPackets>`
**presence node**\: True
.. attribute:: output_hello_packets
Number of Hello packets sent
**type**\: :py:class:`OutputHelloPackets <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputHelloPackets>`
**presence node**\: True
.. attribute:: input_hello_packets
Number of Hello packets received
**type**\: :py:class:`InputHelloPackets <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputHelloPackets>`
**presence node**\: True
.. attribute:: output_ls_requests
Number of LS Requests sent
**type**\: :py:class:`OutputLsRequests <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputLsRequests>`
**presence node**\: True
.. attribute:: output_lsa_acks_lsa
Number of LSA sent in LSA Acknowledgements
**type**\: :py:class:`OutputLsaAcksLsa <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputLsaAcksLsa>`
**presence node**\: True
.. attribute:: output_lsa_acks
Number of LSA Acknowledgements sent
**type**\: :py:class:`OutputLsaAcks <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputLsaAcks>`
**presence node**\: True
.. attribute:: input_lsa_acks
Number of LSA Acknowledgements received
**type**\: :py:class:`InputLsaAcks <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputLsaAcks>`
**presence node**\: True
.. attribute:: output_lsa_updates
Number of LSA Updates sent
**type**\: :py:class:`OutputLsaUpdates <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputLsaUpdates>`
**presence node**\: True
.. attribute:: output_ls_requests_lsa
Number of LSA sent in LS Requests
**type**\: :py:class:`OutputLsRequestsLsa <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputLsRequestsLsa>`
**presence node**\: True
.. attribute:: input_ls_requests_lsa
Number of LSA received in LS Requests
**type**\: :py:class:`InputLsRequestsLsa <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputLsRequestsLsa>`
**presence node**\: True
.. attribute:: input_ls_requests
Number of LS Requests received
**type**\: :py:class:`InputLsRequests <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputLsRequests>`
**presence node**\: True
"""
_prefix = 'manageability-perfmgmt-cfg'
_revision = '2017-09-07'
def __init__(self):
    """Build one ospfv3-protocol-template list entry (keyed by template_name)."""
    # Py2/Py3 compatible constructor chaining.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate, self).__init__()

    self.yang_name = "ospfv3-protocol-template"
    self.yang_parent_name = "ospfv3-protocol-templates"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = ['template_name']  # YANG list key(s)
    # Child presence containers (one per OSPFv3 counter threshold):
    # yang-name -> (python-name, class).  Reformatted one entry per line; content unchanged.
    self._child_classes = OrderedDict([
        ("input-lsa-acks-lsa", ("input_lsa_acks_lsa", PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputLsaAcksLsa)),
        ("output-db-ds-lsa", ("output_db_ds_lsa", PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputDbDsLsa)),
        ("input-db-ds-lsa", ("input_db_ds_lsa", PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputDbDsLsa)),
        ("input-lsa-updates", ("input_lsa_updates", PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputLsaUpdates)),
        ("output-db-ds", ("output_db_ds", PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputDbDs)),
        ("output-lsa-updates-lsa", ("output_lsa_updates_lsa", PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputLsaUpdatesLsa)),
        ("input-db-ds", ("input_db_ds", PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputDbDs)),
        ("input-lsa-updates-lsa", ("input_lsa_updates_lsa", PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputLsaUpdatesLsa)),
        ("output-packets", ("output_packets", PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputPackets)),
        ("input-packets", ("input_packets", PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputPackets)),
        ("output-hello-packets", ("output_hello_packets", PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputHelloPackets)),
        ("input-hello-packets", ("input_hello_packets", PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputHelloPackets)),
        ("output-ls-requests", ("output_ls_requests", PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputLsRequests)),
        ("output-lsa-acks-lsa", ("output_lsa_acks_lsa", PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputLsaAcksLsa)),
        ("output-lsa-acks", ("output_lsa_acks", PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputLsaAcks)),
        ("input-lsa-acks", ("input_lsa_acks", PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputLsaAcks)),
        ("output-lsa-updates", ("output_lsa_updates", PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputLsaUpdates)),
        ("output-ls-requests-lsa", ("output_ls_requests_lsa", PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputLsRequestsLsa)),
        ("input-ls-requests-lsa", ("input_ls_requests_lsa", PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputLsRequestsLsa)),
        ("input-ls-requests", ("input_ls_requests", PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputLsRequests)),
    ])
    # Leaf descriptors: python-name -> (YLeaf, accepted python types).
    self._leafs = OrderedDict([
        ('template_name', (YLeaf(YType.str, 'template-name'), ['str'])),
        ('sample_interval', (YLeaf(YType.uint32, 'sample-interval'), ['int'])),
    ])
    self.template_name = None
    self.sample_interval = None

    # Presence containers default to absent (None) until the caller assigns them.
    self.input_lsa_acks_lsa = None
    self._children_name_map["input_lsa_acks_lsa"] = "input-lsa-acks-lsa"
    self.output_db_ds_lsa = None
    self._children_name_map["output_db_ds_lsa"] = "output-db-ds-lsa"
    self.input_db_ds_lsa = None
    self._children_name_map["input_db_ds_lsa"] = "input-db-ds-lsa"
    self.input_lsa_updates = None
    self._children_name_map["input_lsa_updates"] = "input-lsa-updates"
    self.output_db_ds = None
    self._children_name_map["output_db_ds"] = "output-db-ds"
    self.output_lsa_updates_lsa = None
    self._children_name_map["output_lsa_updates_lsa"] = "output-lsa-updates-lsa"
    self.input_db_ds = None
    self._children_name_map["input_db_ds"] = "input-db-ds"
    self.input_lsa_updates_lsa = None
    self._children_name_map["input_lsa_updates_lsa"] = "input-lsa-updates-lsa"
    self.output_packets = None
    self._children_name_map["output_packets"] = "output-packets"
    self.input_packets = None
    self._children_name_map["input_packets"] = "input-packets"
    self.output_hello_packets = None
    self._children_name_map["output_hello_packets"] = "output-hello-packets"
    self.input_hello_packets = None
    self._children_name_map["input_hello_packets"] = "input-hello-packets"
    self.output_ls_requests = None
    self._children_name_map["output_ls_requests"] = "output-ls-requests"
    self.output_lsa_acks_lsa = None
    self._children_name_map["output_lsa_acks_lsa"] = "output-lsa-acks-lsa"
    self.output_lsa_acks = None
    self._children_name_map["output_lsa_acks"] = "output-lsa-acks"
    self.input_lsa_acks = None
    self._children_name_map["input_lsa_acks"] = "input-lsa-acks"
    self.output_lsa_updates = None
    self._children_name_map["output_lsa_updates"] = "output-lsa-updates"
    self.output_ls_requests_lsa = None
    self._children_name_map["output_ls_requests_lsa"] = "output-ls-requests-lsa"
    self.input_ls_requests_lsa = None
    self._children_name_map["input_ls_requests_lsa"] = "input-ls-requests-lsa"
    self.input_ls_requests = None
    self._children_name_map["input_ls_requests"] = "input-ls-requests"
    # The XPath segment embeds the list-key predicate.
    self._segment_path = lambda: "ospfv3-protocol-template" + "[template-name='" + str(self.template_name) + "']"
    self._absolute_path = lambda: "Cisco-IOS-XR-manageability-perfmgmt-cfg:perf-mgmt/threshold/ospfv3-protocol/ospfv3-protocol-templates/%s" % self._segment_path()
    self._is_frozen = True  # set last — generator convention
def __setattr__(self, name, value):
    """Route writes through YDK's validating setter; only the listed names are data leaves."""
    self._perform_setattr(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate, ['template_name', 'sample_interval'], name, value)
class InputLsaAcksLsa(_Entity_):
    """
    Number of LSA received in LSA Acknowledgements

    .. attribute:: operator

        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`

    .. attribute:: value

        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295

    .. attribute:: end_range_value

        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295

    .. attribute:: percent

        Set to TRUE if Specified threshold values are in percent
        **type**\: bool

    .. attribute:: rearm_type

        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`

    .. attribute:: rearm_window

        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`

    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Py2/Py3 compatible constructor chaining.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputLsaAcksLsa, self).__init__()

        self.yang_name = "input-lsa-acks-lsa"
        self.yang_parent_name = "ospfv3-protocol-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # path depends on the enclosing template's key
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only node
        self.is_presence_container = True  # YANG presence container
        # Leaf descriptors: python-name -> (YLeaf, accepted python types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "input-lsa-acks-lsa"
        # No _absolute_path here — presumably resolved through the parent list entry at runtime.
        self._is_frozen = True  # set last — generator convention

    def __setattr__(self, name, value):
        """Route writes through YDK's validating setter for the listed leaves."""
        self._perform_setattr(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputLsaAcksLsa, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import, as throughout this generated file.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputLsaAcksLsa']['meta_info']
class OutputDbDsLsa(_Entity_):
    """
    Number of LSA sent in DBD packets

    .. attribute:: operator

        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`

    .. attribute:: value

        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295

    .. attribute:: end_range_value

        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295

    .. attribute:: percent

        Set to TRUE if Specified threshold values are in percent
        **type**\: bool

    .. attribute:: rearm_type

        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`

    .. attribute:: rearm_window

        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`

    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Py2/Py3 compatible constructor chaining.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputDbDsLsa, self).__init__()

        self.yang_name = "output-db-ds-lsa"
        self.yang_parent_name = "ospfv3-protocol-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # path depends on the enclosing template's key
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only node
        self.is_presence_container = True  # YANG presence container
        # Leaf descriptors: python-name -> (YLeaf, accepted python types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "output-db-ds-lsa"
        # No _absolute_path here — presumably resolved through the parent list entry at runtime.
        self._is_frozen = True  # set last — generator convention

    def __setattr__(self, name, value):
        """Route writes through YDK's validating setter for the listed leaves."""
        self._perform_setattr(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputDbDsLsa, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import, as throughout this generated file.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputDbDsLsa']['meta_info']
class InputDbDsLsa(_Entity_):
    """
    Number of LSA received in DBD packets

    .. attribute:: operator

        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`

    .. attribute:: value

        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295

    .. attribute:: end_range_value

        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295

    .. attribute:: percent

        Set to TRUE if Specified threshold values are in percent
        **type**\: bool

    .. attribute:: rearm_type

        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`

    .. attribute:: rearm_window

        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`

    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Py2/Py3 compatible constructor chaining.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputDbDsLsa, self).__init__()

        self.yang_name = "input-db-ds-lsa"
        self.yang_parent_name = "ospfv3-protocol-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # path depends on the enclosing template's key
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only node
        self.is_presence_container = True  # YANG presence container
        # Leaf descriptors: python-name -> (YLeaf, accepted python types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "input-db-ds-lsa"
        # No _absolute_path here — presumably resolved through the parent list entry at runtime.
        self._is_frozen = True  # set last — generator convention

    def __setattr__(self, name, value):
        """Route writes through YDK's validating setter for the listed leaves."""
        self._perform_setattr(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputDbDsLsa, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import, as throughout this generated file.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputDbDsLsa']['meta_info']
class InputLsaUpdates(_Entity_):
    """
    Number of LSA Updates received

    .. attribute:: operator

        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`

    .. attribute:: value

        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295

    .. attribute:: end_range_value

        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295

    .. attribute:: percent

        Set to TRUE if Specified threshold values are in percent
        **type**\: bool

    .. attribute:: rearm_type

        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`

    .. attribute:: rearm_window

        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`

    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Py2/Py3 compatible constructor chaining.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputLsaUpdates, self).__init__()

        self.yang_name = "input-lsa-updates"
        self.yang_parent_name = "ospfv3-protocol-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # path depends on the enclosing template's key
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only node
        self.is_presence_container = True  # YANG presence container
        # Leaf descriptors: python-name -> (YLeaf, accepted python types).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "input-lsa-updates"
        # No _absolute_path here — presumably resolved through the parent list entry at runtime.
        self._is_frozen = True  # set last — generator convention

    def __setattr__(self, name, value):
        """Route writes through YDK's validating setter for the listed leaves."""
        self._perform_setattr(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputLsaUpdates, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import, as throughout this generated file.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputLsaUpdates']['meta_info']
class OutputDbDs(_Entity_):
    """
    Number of DBD packets sent.

    Threshold presence container. Leaves:

    * operator (PmThresholdOp): comparison operator.
    * value (int, 0..4294967295): threshold value, or range start for operator RG.
    * end_range_value (int, 0..4294967295): range end for operator RG; 0 otherwise.
    * percent (bool): True when the threshold values are in percent.
    * rearm_type (PmThresholdRearm): rearm behaviour.
    * rearm_window (int, 1..100): rearm window size (for rearm type Window).

    This class is a :ref:`presence class<presence-class>`.
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2 still needs the explicit super() arguments.
        if sys.version_info <= (3,):
            super(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputDbDs, self).__init__()
        else:
            super().__init__()

        self.yang_name = "output-db-ds"
        self.yang_parent_name = "ospfv3-protocol-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()
        self.is_presence_container = True

        # Hoist the generated module path shared by the enum leaf entries.
        _pkg = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(_pkg, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(_pkg, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])

        # All leaf values start out unset; setattr routes through __setattr__.
        for leaf_name in ('operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'):
            setattr(self, leaf_name, None)

        self._segment_path = lambda: "output-db-ds"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(
            PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputDbDs,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name, value)

    @staticmethod
    def _meta_info():
        # Imported lazily so the meta tables are only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputDbDs']['meta_info']
class OutputLsaUpdatesLsa(_Entity_):
    """
    Number of LSA sent in LSA Updates.

    Threshold presence container. Leaves:

    * operator (PmThresholdOp): comparison operator.
    * value (int, 0..4294967295): threshold value, or range start for operator RG.
    * end_range_value (int, 0..4294967295): range end for operator RG; 0 otherwise.
    * percent (bool): True when the threshold values are in percent.
    * rearm_type (PmThresholdRearm): rearm behaviour.
    * rearm_window (int, 1..100): rearm window size (for rearm type Window).

    This class is a :ref:`presence class<presence-class>`.
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2 still needs the explicit super() arguments.
        if sys.version_info <= (3,):
            super(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputLsaUpdatesLsa, self).__init__()
        else:
            super().__init__()

        self.yang_name = "output-lsa-updates-lsa"
        self.yang_parent_name = "ospfv3-protocol-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()
        self.is_presence_container = True

        # Hoist the generated module path shared by the enum leaf entries.
        _pkg = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(_pkg, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(_pkg, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])

        # All leaf values start out unset; setattr routes through __setattr__.
        for leaf_name in ('operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'):
            setattr(self, leaf_name, None)

        self._segment_path = lambda: "output-lsa-updates-lsa"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(
            PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputLsaUpdatesLsa,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name, value)

    @staticmethod
    def _meta_info():
        # Imported lazily so the meta tables are only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputLsaUpdatesLsa']['meta_info']
class InputDbDs(_Entity_):
    """
    Number of DBD packets received.

    Threshold presence container. Leaves:

    * operator (PmThresholdOp): comparison operator.
    * value (int, 0..4294967295): threshold value, or range start for operator RG.
    * end_range_value (int, 0..4294967295): range end for operator RG; 0 otherwise.
    * percent (bool): True when the threshold values are in percent.
    * rearm_type (PmThresholdRearm): rearm behaviour.
    * rearm_window (int, 1..100): rearm window size (for rearm type Window).

    This class is a :ref:`presence class<presence-class>`.
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2 still needs the explicit super() arguments.
        if sys.version_info <= (3,):
            super(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputDbDs, self).__init__()
        else:
            super().__init__()

        self.yang_name = "input-db-ds"
        self.yang_parent_name = "ospfv3-protocol-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()
        self.is_presence_container = True

        # Hoist the generated module path shared by the enum leaf entries.
        _pkg = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(_pkg, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(_pkg, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])

        # All leaf values start out unset; setattr routes through __setattr__.
        for leaf_name in ('operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'):
            setattr(self, leaf_name, None)

        self._segment_path = lambda: "input-db-ds"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(
            PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputDbDs,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name, value)

    @staticmethod
    def _meta_info():
        # Imported lazily so the meta tables are only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputDbDs']['meta_info']
class InputLsaUpdatesLsa(_Entity_):
    """
    Number of LSA received in LSA Updates.

    Threshold presence container. Leaves:

    * operator (PmThresholdOp): comparison operator.
    * value (int, 0..4294967295): threshold value, or range start for operator RG.
    * end_range_value (int, 0..4294967295): range end for operator RG; 0 otherwise.
    * percent (bool): True when the threshold values are in percent.
    * rearm_type (PmThresholdRearm): rearm behaviour.
    * rearm_window (int, 1..100): rearm window size (for rearm type Window).

    This class is a :ref:`presence class<presence-class>`.
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2 still needs the explicit super() arguments.
        if sys.version_info <= (3,):
            super(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputLsaUpdatesLsa, self).__init__()
        else:
            super().__init__()

        self.yang_name = "input-lsa-updates-lsa"
        self.yang_parent_name = "ospfv3-protocol-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()
        self.is_presence_container = True

        # Hoist the generated module path shared by the enum leaf entries.
        _pkg = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(_pkg, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(_pkg, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])

        # All leaf values start out unset; setattr routes through __setattr__.
        for leaf_name in ('operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'):
            setattr(self, leaf_name, None)

        self._segment_path = lambda: "input-lsa-updates-lsa"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(
            PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputLsaUpdatesLsa,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name, value)

    @staticmethod
    def _meta_info():
        # Imported lazily so the meta tables are only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputLsaUpdatesLsa']['meta_info']
class OutputPackets(_Entity_):
    """
    Total number of packets sent.

    Threshold presence container. Leaves:

    * operator (PmThresholdOp): comparison operator.
    * value (int, 0..4294967295): threshold value, or range start for operator RG.
    * end_range_value (int, 0..4294967295): range end for operator RG; 0 otherwise.
    * percent (bool): True when the threshold values are in percent.
    * rearm_type (PmThresholdRearm): rearm behaviour.
    * rearm_window (int, 1..100): rearm window size (for rearm type Window).

    This class is a :ref:`presence class<presence-class>`.
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2 still needs the explicit super() arguments.
        if sys.version_info <= (3,):
            super(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputPackets, self).__init__()
        else:
            super().__init__()

        self.yang_name = "output-packets"
        self.yang_parent_name = "ospfv3-protocol-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()
        self.is_presence_container = True

        # Hoist the generated module path shared by the enum leaf entries.
        _pkg = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(_pkg, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(_pkg, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])

        # All leaf values start out unset; setattr routes through __setattr__.
        for leaf_name in ('operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'):
            setattr(self, leaf_name, None)

        self._segment_path = lambda: "output-packets"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(
            PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputPackets,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name, value)

    @staticmethod
    def _meta_info():
        # Imported lazily so the meta tables are only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputPackets']['meta_info']
class InputPackets(_Entity_):
    """
    Total number of packets received.

    Threshold presence container. Leaves:

    * operator (PmThresholdOp): comparison operator.
    * value (int, 0..4294967295): threshold value, or range start for operator RG.
    * end_range_value (int, 0..4294967295): range end for operator RG; 0 otherwise.
    * percent (bool): True when the threshold values are in percent.
    * rearm_type (PmThresholdRearm): rearm behaviour.
    * rearm_window (int, 1..100): rearm window size (for rearm type Window).

    This class is a :ref:`presence class<presence-class>`.
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2 still needs the explicit super() arguments.
        if sys.version_info <= (3,):
            super(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputPackets, self).__init__()
        else:
            super().__init__()

        self.yang_name = "input-packets"
        self.yang_parent_name = "ospfv3-protocol-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()
        self.is_presence_container = True

        # Hoist the generated module path shared by the enum leaf entries.
        _pkg = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(_pkg, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(_pkg, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])

        # All leaf values start out unset; setattr routes through __setattr__.
        for leaf_name in ('operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'):
            setattr(self, leaf_name, None)

        self._segment_path = lambda: "input-packets"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(
            PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputPackets,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name, value)

    @staticmethod
    def _meta_info():
        # Imported lazily so the meta tables are only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputPackets']['meta_info']
class OutputHelloPackets(_Entity_):
    """
    Total number of packets sent.

    NOTE(review): yang name "output-hello-packets" suggests this counts Hello
    packets sent; the generated description above is likely a copy artifact —
    confirm against the YANG model.

    Threshold presence container. Leaves:

    * operator (PmThresholdOp): comparison operator.
    * value (int, 0..4294967295): threshold value, or range start for operator RG.
    * end_range_value (int, 0..4294967295): range end for operator RG; 0 otherwise.
    * percent (bool): True when the threshold values are in percent.
    * rearm_type (PmThresholdRearm): rearm behaviour.
    * rearm_window (int, 1..100): rearm window size (for rearm type Window).

    This class is a :ref:`presence class<presence-class>`.
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2 still needs the explicit super() arguments.
        if sys.version_info <= (3,):
            super(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputHelloPackets, self).__init__()
        else:
            super().__init__()

        self.yang_name = "output-hello-packets"
        self.yang_parent_name = "ospfv3-protocol-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()
        self.is_presence_container = True

        # Hoist the generated module path shared by the enum leaf entries.
        _pkg = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(_pkg, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(_pkg, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])

        # All leaf values start out unset; setattr routes through __setattr__.
        for leaf_name in ('operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'):
            setattr(self, leaf_name, None)

        self._segment_path = lambda: "output-hello-packets"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(
            PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputHelloPackets,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name, value)

    @staticmethod
    def _meta_info():
        # Imported lazily so the meta tables are only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputHelloPackets']['meta_info']
class InputHelloPackets(_Entity_):
    """
    Number of Hello packets received.

    Threshold presence container. Leaves:

    * operator (PmThresholdOp): comparison operator.
    * value (int, 0..4294967295): threshold value, or range start for operator RG.
    * end_range_value (int, 0..4294967295): range end for operator RG; 0 otherwise.
    * percent (bool): True when the threshold values are in percent.
    * rearm_type (PmThresholdRearm): rearm behaviour.
    * rearm_window (int, 1..100): rearm window size (for rearm type Window).

    This class is a :ref:`presence class<presence-class>`.
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2 still needs the explicit super() arguments.
        if sys.version_info <= (3,):
            super(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputHelloPackets, self).__init__()
        else:
            super().__init__()

        self.yang_name = "input-hello-packets"
        self.yang_parent_name = "ospfv3-protocol-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()
        self.is_presence_container = True

        # Hoist the generated module path shared by the enum leaf entries.
        _pkg = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(_pkg, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(_pkg, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])

        # All leaf values start out unset; setattr routes through __setattr__.
        for leaf_name in ('operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'):
            setattr(self, leaf_name, None)

        self._segment_path = lambda: "input-hello-packets"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(
            PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputHelloPackets,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name, value)

    @staticmethod
    def _meta_info():
        # Imported lazily so the meta tables are only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputHelloPackets']['meta_info']
class OutputLsRequests(_Entity_):
    """
    Number of LS Requests sent.

    Threshold presence container. Leaves:

    * operator (PmThresholdOp): comparison operator.
    * value (int, 0..4294967295): threshold value, or range start for operator RG.
    * end_range_value (int, 0..4294967295): range end for operator RG; 0 otherwise.
    * percent (bool): True when the threshold values are in percent.
    * rearm_type (PmThresholdRearm): rearm behaviour.
    * rearm_window (int, 1..100): rearm window size (for rearm type Window).

    This class is a :ref:`presence class<presence-class>`.
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2 still needs the explicit super() arguments.
        if sys.version_info <= (3,):
            super(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputLsRequests, self).__init__()
        else:
            super().__init__()

        self.yang_name = "output-ls-requests"
        self.yang_parent_name = "ospfv3-protocol-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()
        self.is_presence_container = True

        # Hoist the generated module path shared by the enum leaf entries.
        _pkg = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(_pkg, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(_pkg, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])

        # All leaf values start out unset; setattr routes through __setattr__.
        for leaf_name in ('operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'):
            setattr(self, leaf_name, None)

        self._segment_path = lambda: "output-ls-requests"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(
            PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputLsRequests,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name, value)

    @staticmethod
    def _meta_info():
        # Imported lazily so the meta tables are only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputLsRequests']['meta_info']
class OutputLsaAcksLsa(_Entity_):
    """
    Number of LSA sent in LSA Acknowledgements.

    Threshold presence container. Leaves:

    * operator (PmThresholdOp): comparison operator.
    * value (int, 0..4294967295): threshold value, or range start for operator RG.
    * end_range_value (int, 0..4294967295): range end for operator RG; 0 otherwise.
    * percent (bool): True when the threshold values are in percent.
    * rearm_type (PmThresholdRearm): rearm behaviour.
    * rearm_window (int, 1..100): rearm window size (for rearm type Window).

    This class is a :ref:`presence class<presence-class>`.
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2 still needs the explicit super() arguments.
        if sys.version_info <= (3,):
            super(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputLsaAcksLsa, self).__init__()
        else:
            super().__init__()

        self.yang_name = "output-lsa-acks-lsa"
        self.yang_parent_name = "ospfv3-protocol-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()
        self.is_presence_container = True

        # Hoist the generated module path shared by the enum leaf entries.
        _pkg = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(_pkg, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(_pkg, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])

        # All leaf values start out unset; setattr routes through __setattr__.
        for leaf_name in ('operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'):
            setattr(self, leaf_name, None)

        self._segment_path = lambda: "output-lsa-acks-lsa"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(
            PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputLsaAcksLsa,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name, value)

    @staticmethod
    def _meta_info():
        # Imported lazily so the meta tables are only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputLsaAcksLsa']['meta_info']
class OutputLsaAcks(_Entity_):
    """
    Number of LSA Acknowledgements sent.

    Threshold presence container. Leaves:

    * operator (PmThresholdOp): comparison operator.
    * value (int, 0..4294967295): threshold value, or range start for operator RG.
    * end_range_value (int, 0..4294967295): range end for operator RG; 0 otherwise.
    * percent (bool): True when the threshold values are in percent.
    * rearm_type (PmThresholdRearm): rearm behaviour.
    * rearm_window (int, 1..100): rearm window size (for rearm type Window).

    This class is a :ref:`presence class<presence-class>`.
    """

    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2 still needs the explicit super() arguments.
        if sys.version_info <= (3,):
            super(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputLsaAcks, self).__init__()
        else:
            super().__init__()

        self.yang_name = "output-lsa-acks"
        self.yang_parent_name = "ospfv3-protocol-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()
        self.is_presence_container = True

        # Hoist the generated module path shared by the enum leaf entries.
        _pkg = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg'
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [(_pkg, 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [(_pkg, 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])

        # All leaf values start out unset; setattr routes through __setattr__.
        for leaf_name in ('operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'):
            setattr(self, leaf_name, None)

        self._segment_path = lambda: "output-lsa-acks"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(
            PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputLsaAcks,
            ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'],
            name, value)

    @staticmethod
    def _meta_info():
        # Imported lazily so the meta tables are only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputLsaAcks']['meta_info']
class InputLsaAcks(_Entity_):
    """
    Number of LSA Acknowledgements received

    .. attribute:: operator
        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if Specified threshold values are in percent
        **type**\: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`
    """

    # YANG module identity for this generated binding.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Py2/Py3-compatible super() call for the generated entity base class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputLsaAcks, self).__init__()

        # YANG identity of this container within its parent template node.
        self.yang_name = "input-lsa-acks"
        self.yang_parent_name = "ospfv3-protocol-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: creating the object is itself configuration.
        self.is_presence_container = True

        # Leaf descriptors: python attribute -> (YLeaf(type, yang-name), accepted value kinds).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # All leaf values start unset until assigned by the user.
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "input-lsa-acks"
        # Freeze: after this, attribute writes go through the checked __setattr__ path.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate assignment validation to the generated-entity base.
        self._perform_setattr(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputLsaAcks, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Deferred import keeps the heavyweight meta table out of module import.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputLsaAcks']['meta_info']
class OutputLsaUpdates(_Entity_):
    """
    Number of LSA Updates sent

    .. attribute:: operator
        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if Specified threshold values are in percent
        **type**\: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`
    """

    # YANG module identity for this generated binding.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Py2/Py3-compatible super() call for the generated entity base class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputLsaUpdates, self).__init__()

        # YANG identity of this container within its parent template node.
        self.yang_name = "output-lsa-updates"
        self.yang_parent_name = "ospfv3-protocol-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: creating the object is itself configuration.
        self.is_presence_container = True

        # Leaf descriptors: python attribute -> (YLeaf(type, yang-name), accepted value kinds).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # All leaf values start unset until assigned by the user.
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "output-lsa-updates"
        # Freeze: after this, attribute writes go through the checked __setattr__ path.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate assignment validation to the generated-entity base.
        self._perform_setattr(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputLsaUpdates, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Deferred import keeps the heavyweight meta table out of module import.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputLsaUpdates']['meta_info']
class OutputLsRequestsLsa(_Entity_):
    """
    Number of LSA sent in LS Requests

    .. attribute:: operator
        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if Specified threshold values are in percent
        **type**\: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`
    """

    # YANG module identity for this generated binding.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Py2/Py3-compatible super() call for the generated entity base class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputLsRequestsLsa, self).__init__()

        # YANG identity of this container within its parent template node.
        self.yang_name = "output-ls-requests-lsa"
        self.yang_parent_name = "ospfv3-protocol-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: creating the object is itself configuration.
        self.is_presence_container = True

        # Leaf descriptors: python attribute -> (YLeaf(type, yang-name), accepted value kinds).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # All leaf values start unset until assigned by the user.
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "output-ls-requests-lsa"
        # Freeze: after this, attribute writes go through the checked __setattr__ path.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate assignment validation to the generated-entity base.
        self._perform_setattr(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputLsRequestsLsa, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Deferred import keeps the heavyweight meta table out of module import.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.OutputLsRequestsLsa']['meta_info']
class InputLsRequestsLsa(_Entity_):
    """
    Number of LSA received in LS Requests

    .. attribute:: operator
        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if Specified threshold values are in percent
        **type**\: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`
    """

    # YANG module identity for this generated binding.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Py2/Py3-compatible super() call for the generated entity base class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputLsRequestsLsa, self).__init__()

        # YANG identity of this container within its parent template node.
        self.yang_name = "input-ls-requests-lsa"
        self.yang_parent_name = "ospfv3-protocol-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: creating the object is itself configuration.
        self.is_presence_container = True

        # Leaf descriptors: python attribute -> (YLeaf(type, yang-name), accepted value kinds).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # All leaf values start unset until assigned by the user.
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "input-ls-requests-lsa"
        # Freeze: after this, attribute writes go through the checked __setattr__ path.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate assignment validation to the generated-entity base.
        self._perform_setattr(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputLsRequestsLsa, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Deferred import keeps the heavyweight meta table out of module import.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputLsRequestsLsa']['meta_info']
class InputLsRequests(_Entity_):
    """
    Number of LS Requests received

    .. attribute:: operator
        Operator
        **type**\: :py:class:`PmThresholdOp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdOp>`
    .. attribute:: value
        Threshold value (or start range value for operator RG)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: end_range_value
        Threshold end range value (for operator RG, set to 0 otherwise)
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: percent
        Set to TRUE if Specified threshold values are in percent
        **type**\: bool
    .. attribute:: rearm_type
        Configure the Rearm type
        **type**\: :py:class:`PmThresholdRearm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg.PmThresholdRearm>`
    .. attribute:: rearm_window
        Configure the rearm window size (for rearm type Window)
        **type**\: int
        **range:** 1..100

    This class is a :ref:`presence class<presence-class>`
    """

    # YANG module identity for this generated binding.
    _prefix = 'manageability-perfmgmt-cfg'
    _revision = '2017-09-07'

    def __init__(self):
        # Py2/Py3-compatible super() call for the generated entity base class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputLsRequests, self).__init__()

        # YANG identity of this container within its parent template node.
        self.yang_name = "input-ls-requests"
        self.yang_parent_name = "ospfv3-protocol-template"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: creating the object is itself configuration.
        self.is_presence_container = True

        # Leaf descriptors: python attribute -> (YLeaf(type, yang-name), accepted value kinds).
        self._leafs = OrderedDict([
            ('operator', (YLeaf(YType.enumeration, 'operator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdOp', '')])),
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('end_range_value', (YLeaf(YType.uint32, 'end-range-value'), ['int'])),
            ('percent', (YLeaf(YType.boolean, 'percent'), ['bool'])),
            ('rearm_type', (YLeaf(YType.enumeration, 'rearm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_manageability_perfmgmt_cfg', 'PmThresholdRearm', '')])),
            ('rearm_window', (YLeaf(YType.uint32, 'rearm-window'), ['int'])),
        ])
        # All leaf values start unset until assigned by the user.
        self.operator = None
        self.value = None
        self.end_range_value = None
        self.percent = None
        self.rearm_type = None
        self.rearm_window = None
        self._segment_path = lambda: "input-ls-requests"
        # Freeze: after this, attribute writes go through the checked __setattr__ path.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate assignment validation to the generated-entity base.
        self._perform_setattr(PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputLsRequests, ['operator', 'value', 'end_range_value', 'percent', 'rearm_type', 'rearm_window'], name, value)

    @staticmethod
    def _meta_info():
        # Deferred import keeps the heavyweight meta table out of module import.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
        return meta._meta_table['PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate.InputLsRequests']['meta_info']
@staticmethod
def _meta_info():
    # Metadata accessor for Ospfv3ProtocolTemplate; import deferred to first use.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
    return meta._meta_table['PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates.Ospfv3ProtocolTemplate']['meta_info']
@staticmethod
def _meta_info():
    # Metadata accessor for Ospfv3ProtocolTemplates; import deferred to first use.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
    return meta._meta_table['PerfMgmt.Threshold.Ospfv3Protocol.Ospfv3ProtocolTemplates']['meta_info']
@staticmethod
def _meta_info():
    # Metadata accessor for Ospfv3Protocol; import deferred to first use.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
    return meta._meta_table['PerfMgmt.Threshold.Ospfv3Protocol']['meta_info']
@staticmethod
def _meta_info():
    # Metadata accessor for Threshold; import deferred to first use.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
    return meta._meta_table['PerfMgmt.Threshold']['meta_info']
def clone_ptr(self):
    # Creates a fresh top-level PerfMgmt entity, caches it as _top_entity,
    # and returns it. NOTE(review): appears to be the YDK top-entity clone
    # hook used by the service layer — confirm against the _Entity_ base API.
    self._top_entity = PerfMgmt()
    return self._top_entity
@staticmethod
def _meta_info():
    # Metadata accessor for the top-level PerfMgmt class; import deferred to first use.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_manageability_perfmgmt_cfg as meta
    return meta._meta_table['PerfMgmt']['meta_info']
| 52.208259
| 3,346
| 0.477024
| 78,639
| 999,997
| 5.755948
| 0.006409
| 0.036797
| 0.045997
| 0.056555
| 0.970217
| 0.955506
| 0.919592
| 0.891241
| 0.876412
| 0.867582
| 0
| 0.013252
| 0.433507
| 999,997
| 19,153
| 3,347
| 52.210985
| 0.785776
| 0.205297
| 0
| 0.816833
| 0
| 0.012636
| 0.172133
| 0.089458
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090077
| false
| 0
| 0.031069
| 0
| 0.182935
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
12d796a49ed281bcd90924db24a6676777afabb5
| 12,937
|
py
|
Python
|
seating_charts/migrations/0001_initial.py
|
rectory-school/rectory-apps
|
184021529ac9cadc3b7c0fbf93a023c82fc76b91
|
[
"MIT"
] | null | null | null |
seating_charts/migrations/0001_initial.py
|
rectory-school/rectory-apps
|
184021529ac9cadc3b7c0fbf93a023c82fc76b91
|
[
"MIT"
] | 5
|
2020-06-05T17:33:12.000Z
|
2021-06-10T19:04:41.000Z
|
seating_charts/migrations/0001_initial.py
|
rectory-school/rectory-apps
|
184021529ac9cadc3b7c0fbf93a023c82fc76b91
|
[
"MIT"
] | 1
|
2016-02-08T15:53:28.000Z
|
2016-02-08T15:53:28.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
from django.conf import settings
class Migration(migrations.Migration):
    """Auto-generated initial schema for the seating_charts app.

    Creates the live models (Ethnicity, Layout, MealTime, PinnedStudent,
    SeatFiller, SeatingStudent, Table, TableAssignment) plus Historical*
    shadow tables carrying history_id/history_date/history_type/history_user
    columns. NOTE(review): the Historical* tables look like they are managed
    by an audit framework such as django-simple-history — confirm before
    editing them by hand. Auto-generated migrations should generally not be
    modified; operation order is schema-significant.
    """

    dependencies = [
        # Swappable user model reference (for the history_user FKs below).
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        # seating relies on academics.Grade / academics.Enrollment.
        ('academics', '0022_auto_20160203_1038'),
    ]

    operations = [
        migrations.CreateModel(
            name='Ethnicity',
            fields=[
                ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True, serialize=False)),
                ('ethnicity', models.CharField(max_length=200)),
            ],
        ),
        # --- Historical shadow tables: each mirrors a live model's fields and
        # adds the history bookkeeping columns; 'id' is a plain (non-PK)
        # IntegerField because history_id is the primary key. ---
        migrations.CreateModel(
            name='HistoricalEthnicity',
            fields=[
                ('id', models.IntegerField(verbose_name='ID', db_index=True, blank=True, auto_created=True)),
                ('ethnicity', models.CharField(max_length=200)),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
                ('history_user', models.ForeignKey(null=True, related_name='+', on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'historical ethnicity',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
        ),
        migrations.CreateModel(
            name='HistoricalMealTime',
            fields=[
                ('id', models.IntegerField(verbose_name='ID', db_index=True, blank=True, auto_created=True)),
                ('name', models.CharField(max_length=200)),
                ('include_boarding_students', models.BooleanField(default=False)),
                ('include_day_students', models.BooleanField(default=False)),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
                ('history_user', models.ForeignKey(null=True, related_name='+', on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'historical meal time',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
        ),
        migrations.CreateModel(
            name='HistoricalPinnedStudent',
            fields=[
                ('id', models.IntegerField(verbose_name='ID', db_index=True, blank=True, auto_created=True)),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
                ('history_user', models.ForeignKey(null=True, related_name='+', on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'historical pinned student',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
        ),
        migrations.CreateModel(
            name='HistoricalSeatFiller',
            fields=[
                ('id', models.IntegerField(verbose_name='ID', db_index=True, blank=True, auto_created=True)),
                ('description', models.CharField(blank=True, max_length=200)),
                ('seats', models.IntegerField()),
                ('display', models.BooleanField(default=False)),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
                ('history_user', models.ForeignKey(null=True, related_name='+', on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'historical seat filler',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
        ),
        migrations.CreateModel(
            name='HistoricalTable',
            fields=[
                ('id', models.IntegerField(verbose_name='ID', db_index=True, blank=True, auto_created=True)),
                ('description', models.CharField(max_length=200)),
                ('capacity', models.IntegerField()),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
                ('history_user', models.ForeignKey(null=True, related_name='+', on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'historical table',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
        ),
        migrations.CreateModel(
            name='HistoricalTableAssignment',
            fields=[
                ('id', models.IntegerField(verbose_name='ID', db_index=True, blank=True, auto_created=True)),
                # NOTE(review): 'waitor' (sic) — the misspelling is baked into
                # the schema; fixing it would require a rename migration.
                ('waitor', models.BooleanField(default=False)),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
                ('history_user', models.ForeignKey(null=True, related_name='+', on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'historical table assignment',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
        ),
        # --- Live models ---
        migrations.CreateModel(
            name='Layout',
            fields=[
                ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True, serialize=False)),
                ('name', models.CharField(max_length=25)),
            ],
        ),
        migrations.CreateModel(
            name='MealTime',
            fields=[
                ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True, serialize=False)),
                ('name', models.CharField(max_length=200)),
                ('include_boarding_students', models.BooleanField(default=False)),
                ('include_day_students', models.BooleanField(default=False)),
                ('include_grades', models.ManyToManyField(to='academics.Grade')),
            ],
        ),
        migrations.CreateModel(
            name='PinnedStudent',
            fields=[
                ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True, serialize=False)),
                ('meal_time', models.ForeignKey(to='seating_charts.MealTime')),
            ],
        ),
        migrations.CreateModel(
            name='SeatFiller',
            fields=[
                ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True, serialize=False)),
                ('description', models.CharField(blank=True, max_length=200)),
                ('seats', models.IntegerField()),
                ('display', models.BooleanField(default=False)),
                ('meal_time', models.ManyToManyField(to='seating_charts.MealTime')),
            ],
        ),
        migrations.CreateModel(
            name='SeatingStudent',
            fields=[
                ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True, serialize=False)),
                ('enrollment', models.ForeignKey(to='academics.Enrollment')),
                ('ethnicity', models.ForeignKey(null=True, to='seating_charts.Ethnicity')),
            ],
        ),
        migrations.CreateModel(
            name='Table',
            fields=[
                ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True, serialize=False)),
                ('description', models.CharField(max_length=200)),
                ('capacity', models.IntegerField()),
                ('for_meals', models.ManyToManyField(to='seating_charts.MealTime')),
            ],
        ),
        migrations.CreateModel(
            name='TableAssignment',
            fields=[
                ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True, serialize=False)),
                ('waitor', models.BooleanField(default=False)),
                ('meal_time', models.ForeignKey(to='seating_charts.MealTime')),
                ('student', models.ForeignKey(to='seating_charts.SeatingStudent')),
                ('table', models.ForeignKey(to='seating_charts.Table')),
            ],
            options={
                'permissions': (('view', 'Can view table assignments'), ('edit', 'Can edit table assignments')),
            },
        ),
        # --- Forward-referencing FKs, added after their target models exist. ---
        migrations.AddField(
            model_name='seatfiller',
            name='table',
            field=models.ForeignKey(to='seating_charts.Table'),
        ),
        migrations.AddField(
            model_name='pinnedstudent',
            name='student',
            field=models.ForeignKey(to='seating_charts.SeatingStudent'),
        ),
        migrations.AddField(
            model_name='pinnedstudent',
            name='table',
            field=models.ForeignKey(to='seating_charts.Table'),
        ),
        migrations.AddField(
            model_name='layout',
            name='left_print',
            field=models.ForeignKey(related_name='+', to='seating_charts.MealTime'),
        ),
        migrations.AddField(
            model_name='layout',
            name='right_print',
            field=models.ForeignKey(null=True, related_name='+', blank=True, to='seating_charts.MealTime'),
        ),
        # Historical FKs use db_constraint=False / DO_NOTHING so history rows
        # survive deletion of the referenced live rows.
        migrations.AddField(
            model_name='historicaltableassignment',
            name='meal_time',
            field=models.ForeignKey(null=True, db_constraint=False, related_name='+', on_delete=django.db.models.deletion.DO_NOTHING, blank=True, to='seating_charts.MealTime'),
        ),
        migrations.AddField(
            model_name='historicaltableassignment',
            name='student',
            field=models.ForeignKey(null=True, db_constraint=False, related_name='+', on_delete=django.db.models.deletion.DO_NOTHING, blank=True, to='seating_charts.SeatingStudent'),
        ),
        migrations.AddField(
            model_name='historicaltableassignment',
            name='table',
            field=models.ForeignKey(null=True, db_constraint=False, related_name='+', on_delete=django.db.models.deletion.DO_NOTHING, blank=True, to='seating_charts.Table'),
        ),
        migrations.AddField(
            model_name='historicalseatfiller',
            name='table',
            field=models.ForeignKey(null=True, db_constraint=False, related_name='+', on_delete=django.db.models.deletion.DO_NOTHING, blank=True, to='seating_charts.Table'),
        ),
        migrations.AddField(
            model_name='historicalpinnedstudent',
            name='meal_time',
            field=models.ForeignKey(null=True, db_constraint=False, related_name='+', on_delete=django.db.models.deletion.DO_NOTHING, blank=True, to='seating_charts.MealTime'),
        ),
        migrations.AddField(
            model_name='historicalpinnedstudent',
            name='student',
            field=models.ForeignKey(null=True, db_constraint=False, related_name='+', on_delete=django.db.models.deletion.DO_NOTHING, blank=True, to='seating_charts.SeatingStudent'),
        ),
        migrations.AddField(
            model_name='historicalpinnedstudent',
            name='table',
            field=models.ForeignKey(null=True, db_constraint=False, related_name='+', on_delete=django.db.models.deletion.DO_NOTHING, blank=True, to='seating_charts.Table'),
        ),
        # Uniqueness constraints: one assignment / one pin per (student, meal).
        migrations.AlterUniqueTogether(
            name='tableassignment',
            unique_together=set([('meal_time', 'student')]),
        ),
        migrations.AlterUniqueTogether(
            name='pinnedstudent',
            unique_together=set([('student', 'meal_time')]),
        ),
    ]
| 50.338521
| 182
| 0.576254
| 1,189
| 12,937
| 6.063078
| 0.105971
| 0.053267
| 0.039534
| 0.049938
| 0.84228
| 0.838951
| 0.789014
| 0.780968
| 0.766264
| 0.722846
| 0
| 0.005225
| 0.275102
| 12,937
| 256
| 183
| 50.535156
| 0.763489
| 0.001623
| 0
| 0.768
| 0
| 0
| 0.191343
| 0.045609
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.016
| 0
| 0.028
| 0.008
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
12fd978d6b9af2d254295e45eea569132d410f9a
| 36
|
py
|
Python
|
testsuite/modulegraph-dir/diamond_a.py
|
xoviat/modulegraph2
|
766d00bdb40e5b2fe206b53a87b1bce3f9dc9c2a
|
[
"MIT"
] | 9
|
2020-03-22T14:48:01.000Z
|
2021-05-30T12:18:12.000Z
|
testsuite/modulegraph-dir/diamond_a.py
|
xoviat/modulegraph2
|
766d00bdb40e5b2fe206b53a87b1bce3f9dc9c2a
|
[
"MIT"
] | 15
|
2020-01-06T10:02:32.000Z
|
2021-05-28T12:22:44.000Z
|
testsuite/modulegraph-dir/diamond_a.py
|
ronaldoussoren/modulegraph2
|
b6ab1766b0098651b51083235ff8a18a5639128b
|
[
"MIT"
] | 4
|
2020-05-10T18:51:41.000Z
|
2021-04-07T14:03:12.000Z
|
import diamond_b1
import diamond_b2
| 12
| 17
| 0.888889
| 6
| 36
| 5
| 0.666667
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 0.111111
| 36
| 2
| 18
| 18
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
423db493cb9ea2627eb1d757f32eae3afa8bada1
| 13,419
|
py
|
Python
|
neoman/qt_resources.py
|
Yubico/yubikey-neo-manager-dpkg
|
e671c43020ec0c614bc140bfaf9b185d22a8a5d7
|
[
"BSD-2-Clause"
] | 5
|
2015-05-14T16:26:26.000Z
|
2021-11-16T21:26:38.000Z
|
neoman/qt_resources.py
|
DalavanCloud/yubikey-neo-manager-dpkg
|
e671c43020ec0c614bc140bfaf9b185d22a8a5d7
|
[
"BSD-2-Clause"
] | 2
|
2015-01-30T10:30:41.000Z
|
2015-04-16T09:31:38.000Z
|
neoman/qt_resources.py
|
DalavanCloud/yubikey-neo-manager-dpkg
|
e671c43020ec0c614bc140bfaf9b185d22a8a5d7
|
[
"BSD-2-Clause"
] | 4
|
2015-05-14T16:26:30.000Z
|
2021-11-16T21:26:28.000Z
|
# -*- coding: utf-8 -*-
# Resource object code
#
# Created: Thu Nov 12 10:26:36 2015
# by: The Resource Compiler for PySide (Qt v4.8.4)
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore
qt_resource_data = "\x00\x00\x01\xcc\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xffa\x00\x00\x00\x06bKGD\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\x00\x00\x09pHYs\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x00\x07tIME\x07\xde\x03\x13\x0a0\x13\xb7\x83\xa7\xa5\x00\x00\x00\x1diTXtComment\x00\x00\x00\x00\x00Created with GIMPd.e\x07\x00\x00\x010IDAT8\xcb\xcd\xd21K\x9bq\x10\xc7\xf1ObH\x09\xcf\x10t\x08\xe4\xa9\x06\x0a%89\xd9J\xc0\xa1\xa3o \xb3P\xe8\xec\x1bp\x11|\x01]upV\xe2\x9aIp\xc9\xd2\xa1K;D\x9a.\xad\xda\xff\x10\xa4\xa1\xa0qJ\xd2!\x8f\x81H\x12\x92\xcd\xdfx\xbf\xbb\xef\xddq\x97\xb2\xa8\x822\xeax\x8b\xc3E\x8b\xdf\x08\xfe\x08\x06\x82o\x82\xf5\xcc\x93W\xadV\x07\xb3j\xfbK}\x97\xd7\x97:\xa5\x8eB\xab\xa0]nW\xc4\xba\xe9y\x9b\xb7>\xb4tJ\x1d\xd1]\xa4rR!\xd6\x85\xcc\xf3\xc4Z\xad\x96\x9a0z\x09MD\x9b\xa7\x9b\xb2\x8f\xd9\x915\xef\x04\xfb\x88p^\xf8Y\x183\xc6\x00\xdd|\x97 \xff\xac\xfb\x1av\xd1K@\x93\x01\x0f+\x0f\xea\x07\xf5aI\xf0Y\xf0*\xb1>!\x8b3\xb1\x1fS\x01\xb9\x7f9\xc5f\x11r\xd8CCP\xc4\xc7$\xe5h\xd2n#@\xba\x97\xb6}\xbc\x0d\xef\xf1\x0b\xef\xf0\x05\xaf\xf1\x1b\x8d\x99\x80\x91b_\xb1\x85\x16JI\xf4Bl0\x1f`\x08ic\x07\xf7I\xe4j\xdayR\xb3>\xf1v\xe3\x96\x14\xcb7\xcb\xa2\xbf\xd1\xc4\x7f\xc9\xcc:\xfe\xea\xf7U/_\xff\x01b\x88P\x1b\xd1\xc6\xe2Q\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x00\xd5\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xffa\x00\x00\x00\x06bKGD\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\x00\x00\x09pHYs\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x00\x07tIME\x07\xde\x03\x13\x0a1%a\x22\x03}\x00\x00\x00\x1diTXtComment\x00\x00\x00\x00\x00Created with 
GIMPd.e\x07\x00\x00\x009IDAT8\xcbc`\x18h\xc0\x08c\x84\x86\x86\xfe'E\xe3\xea\xd5\xab\x19\x19\x18\x18\x18\x98(u\x01\x0b.\x93q\x01t\x97R\xec\x82Q\x03\x06\x83\x01,\x84\xe2\x99\xe6.\x18x\x00\x00r\x01\x0a\x1a]I8\xe0\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x02\x05\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x14\x00\x00\x00\x14\x08\x06\x00\x00\x00\x8d\x89\x1d\x0d\x00\x00\x00\x06bKGD\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\x00\x00\x09pHYs\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x00\x07tIME\x07\xde\x09\x10\x0b#6\xc4\xaf\xef\x08\x00\x00\x00\x1diTXtComment\x00\x00\x00\x00\x00Created with GIMPd.e\x07\x00\x00\x01iIDAT8\xcb\xb5\x95\xafo\xc2@\x14\xc7?\x05\x048,\x0apmBR;\xc7M\xd5\xb2\xff`K0S\xb0\xbf\xa0\xb9\xbf`C\xeeO\x98\x1a\xa2\xe6\xd48\x8f\xc1\x81b(pW\x81\x84d\x86v\x0di\xef\x96\x90=\xd5w\xf7\xbdO\xde\x8f\xbcW\x0f\x87\xc5q,\x8a\xbe\x94ra\xd3{\x16\xc8\x04\x18U\xbc\x9b\x03\xb32\xb8W\x02{\x05\xa6\xfc\xcd\xde\xa4\x94/\x95\xc08\x8e?-QU\xd9\x5cJ\xf9\x909\xf5\xab\xc8\x1e\xcb^\x04A\xc0x<f0\x18\xb0\x5c.\xaf\xaf}!D[k\xad\x00j\x85\x9aU\xa6\x19\x86!\xcdf\xd3\x16\xe54k^\xe3r0\xb1\xa9\x95Rl6\x1b\xd6\xeb\xb5M6\x01\x16\x19\xd0Z7c\x0c\xc6\x18W-G\x00\xde%\xd4\xaf*U\xab\xd5\xc2\xf7}\xc20D)\xc5~\xbf\xb7A\xef\x1b\xb6\xdb~\xbf\xcfp8\xa4\xd7\xeb\x01\xb8\xeaH\xde\x94*K\xd3\x14\xadu\xeeo\xb7\xdb\xdb\x80\xc6\x18\xba\xdd.\x80\xab!\xbf@\xd7lf\xe9\xeev;'LJ\xb9\xa8\x15f\xd3\x0a<\x1c\x0e\x04A`\x9d\x98b\xca\xb32E\xa7\xd3\xc9\xbf\xa3(\x22MS\x1bp\x96\x8f\x9e\xd6\xfa[\x08\xd1\x06\xee\x8a\x8a\xf3\xf9\xcc\xe9tb\xb5Z\x91$\x09\xc7\xe3\xd1\xb6$\xde\xffw9\x5c\x22\xfd(\x8b\xd4\xb1\xbe\x9e\x8a\x07\xf5k\x85\xd6Z\x09!4\xd0\x06|K\x03\x9e\xb34\x9d\x1b\xfb\x96_\xc0\x0fs\x8b\x87\xf4\xd2\xd9\xef\x13\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x01\xb4\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xffa\x00\x00\x00\x06bKGD\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\x00\x00\x09pHYs\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x00\x07tI
ME\x07\xde\x03\x13\x0a1\x15G\xfb3\xd1\x00\x00\x00\x1diTXtComment\x00\x00\x00\x00\x00Created with GIMPd.e\x07\x00\x00\x01\x18IDAT8\xcb\xcd\xd2\xbb/C\x01\x14\xc7\xf1\xcf\xd5\xdb\xdc\x96H\xb4\xe3\x0d\x12\x83\x18\xec\x12b\xb0X\xbb\xf5\xef\x90\x18\xfdK\x1d\xed\x161\xf8\x13\xc4\x84\xb8\x22\x1a\x09Z-m]\x83\xebz\xf4\x91\xda\x9c\xf1<\xbe\xe7w\x1e\x81\xbfZb\x0e\x1b\x98\xc3y\xf8\xc7\xe2Yl\xa2\x84G\x5c\x07\x9f\xb1z\xbd\x9eN\xaaM\x83Ts\xa5\xa9W\xee\x89Z\x91\xa3\xfd\xa3Pl03m\xf3v\xb5\xadW\xee\x09_C\x95\xab\x0a\xb1\x01\x0c\x8d\xd0h4\x82\x11\xd2\xcb\xd8A\xa1vP;\x9cy\xfb\xea;\xad\x82U\x14p\x13\xb5\xa3\x1f\x81\x1f\x80A8 \xf9\xa5\xea\xa3\xfb\x22R\x9c\xfd&\xe7\x80~\xb1\xefv\xed\x16v%\xd6%yl9\xcbK\xc4Zc\x01\x85~A\xe9\xa9$\x93\xba\x82-\x89\x08KY\xca\xc5\xa8\xd9r@\x90\x06\xaa\x97U8\xc63\x16\xb0\x9d\xdd\xbc#v?\x11\x90[\xec\x01'h\xa3\x9cy\xef\xc6mw\xf4\x15b/8E?\xf3\xb4\xc6\x01\x86\xfe \xff\xc8=\xba\xf3]i\x90*v\x8a\xc2z8\x1d\xe0\xbbeK\xfd\xe7\xf6\x0e\x8f\x03I%d\x5c\x96\xd0\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x00\xe1<RCC>\x0a<qresource>\x0a<file>icon_about.png</file>\x0a<file>neoman.png</file>\x0a<file>icon_installed.png</file>\x0a<file>qt_resources.qrc</file>\x0a<file>icon_some_installed.png</file>\x0a<file>icon_not_installed.png</file>\x0a</qresource>\x0a</RCC>\x0a\x00\x00\x09R\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x000\x00\x00\x000\x08\x06\x00\x00\x00W\x02\xf9\x87\x00\x00\x00\x06bKGD\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\x00\x00\x09pHYs\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x00\x07tIME\x07\xde\x0a\x0e\x0b9/\xe7\x8f\xe7o\x00\x00\x00\x1diTXtComment\x00\x00\x00\x00\x00Created with GIMPd.e\x07\x00\x00\x08\xb6IDATh\xde\xc5\x9amlS\xe7\x15\xc7\x7f\xf7\xda\xb1\x89\xf3B^L\xb2\x94\x10')I\x03\x1d/\xa5\xa5I 
\x14\xe7\xcb\x90h\xd9P\xa5\xee\x1d4Qi\xd3\xb4\x0f\xdb:WUU\x98T!\xfa\xa12H\x93\xa6IU\xa5\x22\x15iUWib\xeb\xba\xa9l\xab\x03iF\x15\xc2\xebX\x03\x09\x858$,$!\xc1\x8e\x13'\xf6}\xd9\x87<\x09\xbe\xbe\xd7\x8e\x1d\xdc\xee\xe4S\xec{\xcfs\xces\xces\xce\xff\xfc\x1fK\xe4P\xfc\x1d\xed\x0e`\x05\xe0\x04lI_\xab\xc0\x1c0\xeb\xf3\x06b\xb9ZS\xca\x81\xd1%\xc03\xc0\x16\xa0\x09\xa8\x03V\x03\xc5\x09\xfau \x0c\x0c\x03\xb7\x80k@\x0f\xd0\xe5\xf3\x06&\xff/\x0e\xf8;\xda\xeb\x81\x83\xc0nal~\x96*\xa2\xc0\x14\xf0\x11\xf0\xba\xcf\x1b\x08~%\x0e\xf8;\xda\xd7\x00\xaf\x02?%\xb7\xf2\x16\xf0\x06p\xdb\xe7\x0d\xe89w\xc0\xdf\xd1^\x08\xbc\x04\xfc\x1c(\xe3\xcb\x91\x09\xe07\xc01\x9f7\x10\xc9\x99\x03\xfe\x8e\xf6J\xe0\xaf\x22\xcf\xd3\x8a\xaekhh\xe8\xba&R\x7f~\x19I\x92\x91\x91\x91$9\x93%/\x02\xbb}\xde\xc0\xc8C;\xe0\xefh\xdf\x0c\x9c\x03\xec\x96\x06\xa3#\x01\x0e\x9b\x0b\x87\xdd\x85\xdbUGeQ#e.\x0f\xf9y\xc5\xf3\xc9\x1e\x0f31\x13\xe4n\xa4\x9f\xb1\xe9\x9b\xc4\x95\x19b\xea\x0c\xfa\xbck\xa9\x96\x8e\x01O\xfb\xbc\x81\xcb\xcbv\xc0\xdf\xd1\xfeM\xe0\x0f\xa2,\x9aLW\xb4\x18\x95\x85\x8d4\xacj\xc3S\xba\x95\xaa\xa2u\xd8\xe4\xbc\xb4\x1b\xa2\xe9\x0aw\xc2\xbd\x04'\xce\xd1?\xde\xc9H\xa4\x8f<\xd9\x91\xca\x94Y\xe0{>o\xe0d\xd6\x0e\x88\x9d\xff\xcc\xcaxMW\xb1I6v5\xbdBC\xf9\x0e\x1cv\xd7\xb2\x12>\xa6\xce\xd0?v\x86\x8f\xaf\xbf\x89\xaa\xab\xc8\x92-U$\x9a}\xde\xc0\xa5\x8c\x1d\x109?\x94*m\xaaWn`\xef\xd7\x8f\xe0\xb4\x17\xe6\xe4\xe4\xce*\x11N^=\xc8p\xe8J\xaaG\xe2\xc0\x1a\x9f7pwI\x07D\xb59m>\xb0:\xaa\xa6\xb0\xad\xeeG\xb4\xd6\xec_2U\xb2\x15MW8\x1b|\x97\xae\x81\xe3\xd8$\xbb\x95i=\x80\xd7\xe7\x0dL'~hU\x12^\xb2\xaa6\xaa\xa6\xf0\xdc\xfa_\xd3V\xfbb\xce\x8d\x07\x90%;\xdbk\x0f\xf0\xec\xbaC\xa8\xbab\xf5\xc8S\x80/m\x04D\x93\xbadU\xe7\x9b=?\xa0\xad\xf6E\xc39\xd0t\xd5\xb4\x8aM\xceKWYP\xb5\x18\xba\xc9x\x19Yz\x90\xad]\x03\xc7\xf9,x\xc2\xea\xf5\x10\xb0\xd1\xe7\x0d\x0c.|\x90\x9c\xe3\xaf&\x1b\xaf\xe9*5%\x9bi\xad\xd9ox\xf0\xbf\xe1\xcf\xe9\xbe\xfd\x1e6\xe9A4\x14m\x8e\xe6\x9a\xef\xb3z\xe5FK\xe3#\xb1q\xfe\xdew\xcc\xf0\x8e\xaa+lzd\x0f\xf5e-\x8b\x9f\xb5x\xf61\x14\xba\xcc
\xe0\xe4\xc5\xe4\x83\xbd\x12x\x0d\xf8\x89)\x02\x02\xdb|\x91\x9c\xf7\x12\x12?\xdb\xfeg\xd3\x81\xd5t\x0d\x7f\xc7\xce\xf9\x05\xa4y5\x9a\x16\xa7\xae\xbc\x85\x176\xfa-\x1d8\x1b<\xc1\xe9/~\x87\xdd\xb6b\xa1\xeb\xa1\xe81~\xf5\xcc?\xc9\xb3\x19\xa1\xd4\x9c2\xc5o\xbb\xbe%\x9a\xa1)\xa2\xb5\x0b\xd8)\xf1\x0c\x1cL~J\xd1b\xecjz\xc5\xb2\xda\xc8\x92\xcc\x96\xea\xe7Qu\x05I\xfc\xd9d\x077\xc6?%\x1a\x0fY:pi\xf8\x8f\xd8e\xe7\xe2\xf3\x9a\xae\xb0\xa9j\x8f\xc9x\x00\xa7\xbd\x88]\x8d/\xa3h\x96\xc8\xfb5\xc3!\x16\x90xwr\x87\xad,l\xa4\xa1|G\xca|\xde\xb2\xfayQ1\x1e\x88]v\xd0}\xfb\xf7\xa6g\x87BW\x08\xcd\x8e\x18\xa0\x84$\xd9x\xb2\xfa\x85\x94\xfa\x1b+\xbcT\x146\xa0\x9bN\x0d{\xfc\x1d\xed\xc5\x89\x11xF@b\xc3\xe9nX\xd5\x96\xb6I\x159+XS\xfa\x84a\x01Y\xb2s}\xf44\xaa\x167<{~\xe8\x03\xec\xb2\xd3\xb0A5%\x9b)\xcd_\x9dR\xbf\xc3\x96\xcfc\xabvZ\x95\x84\x95\xc2\xe6E\x07\xb6$\xe3y\x87\xcd\x85\xa7tk\xda\xd2\x97g\xcb\xa7\xbe\xac\xc5Tu\xa2\xf1\x10\xc1\xc9\xf3\x09\xff\xdfg(\xf4o\xe3\xee#Q_\xdej\x99>\x89\xe2)\xdd\x8a\xc3f\xda\xc4|QV\x91\xc5\x18\xd8d\xf2\xde\xee\xa2\xaah\xdd\x92\xf5\xbb\xbe\xbc\x05\x87\xbd\xc0\xd86\xb5Y\x06&\xcf\x09D\x0a\xc1\xc9\x0b\xc4\x95\x99\xa4\x1c/`\xad{\xc7\x92\xfa\xab\x8a\x9bL\xfa\x85\xac\xf7w\xb4;e1\xc3\xd6%Cb\xb7\xab.\xa3\x86U\xee\xf2PQ\xb8v\xd1\xd8\x05\x19\x98\xe8fN\x9dF\xd3U\x06\xef_0\x1cF]\xd7x\xa4xC\xda\xf4ILIwA\x9dI?P\x0f\xac\x90\x05X3h\xd2\xd0\xa8,j\xcc\xb8\x8b>\xbd\xe6\xbb\xa8z\xdc\x90\x1ew#7\xb87\x1d$\xa6N\x13\x9c\xe8A\x92\xa4\x84\xda\x1f\xa7\xb5\xf6\x87\x19\xeb\xaf,jD\xc3\xe4\xc0j\xc0!\x0b\xf6\xa089\x02e.O\xc6\x0b\xd4\x955S\x92\xbf\xda\xb0K6\xc9N\xcf\xed\xf7\x99\x98\x19\xe4\xde\xcc\xe0b-\xd7t\x95\xaa\xa2&\xaa\x8a\xd6g\xac\xbf,\xbf\xc6*\x02\xc5\x80]\xb6\x06u\xfa\xe20\x92\xa9\xb4z\xf6\x13\xd7\xe6\x0c\x90\xe2\xc6\xbd\x7f\xf1\xe9\xadw\x0c\xa9\xa8hsl\xab=\x90\x95\xee\x15\xf6\xc2\x84\xe9\xce\x88\xe3dr$\x0d\xee6\x0a\x1de\xa6\xa1gp\xf2B\x02\x1c\xd0)s\xd5P\xbdrC\xee@`\x02oc\xe8\x02\xd1x8+EN{!\x8f\xba\xb7\x99B\x9dX:u\xa0\xc1\xbd#\xeb9bN\x89X\xc1\x09m\xc1\x01U\x90N\x86E'f\xb2\xa3id\xc9N]i3v\xd9\x
91\xb61=Z\xbe-\xd3\xc1~Q\xeeE\x07\xad\xde\x09\x03\x8a,\xe8\xbeacXd\xeeF\xfa\xb3\x0eg]ys\xda\xb3S\xe8t\xe3)}2k\xbd\xa3S\xfd\xc8\xe6l\x1f\x02b\xb2\x18\x9co%G`l\xfa&\x9a\xf5`\x91\xe6\xb0\x15QW\xdej9'(\xda\x1c[\xab\xbf\x9d\xb5\xf1\xaa\xae06}\xd3*\x02\xb7\x80YY\x10\xad\xd7LC\xa82\xc3\x9dpo\xd6\x0b\xb6z\xf6\x19z\xc2\x02\xeeq\xd8]l\xa8z.k}#\xe1^b\xca\xb4\xd5W\xbd>o`NN\x987\xa3\xc9\x8cAp\xe2\x5c\xd6\x0b\xde\x09\x7f\x8e\x94\x14nU\x8b\xd1R\xb3oYU&8\xd9\xc3\x9c:c\xc5\xabv'V\xa1.A\xb4\x1a*F\xffx'1\xf3\xcbi\xa5{\xf0=\x13\x04q\xda\x0bY_\xf9\x8d\xac\x8d\x8f\xabQ\xae\x8f\x9d\xb6\xfa*\x0ct.: (\xee\x8f\x8c\x85Tb$\xd2G\xff\xd8\x99\x8c\x17\xbc}\xff2\x93\xd1!\x03:\xd5u\x8d\xda\xd2\xad\x148\xb2\xa7S\xaf\x8d~\xc2h\xa4\xdfj\xc6\xfe\xd0\xe7\x0d\x84\x93\x1b\xd9\xeb&\xb8,;\xf8\xf8\xfa\x9b\xcc*K\xf3\xac::7\xc6;M\xf9j\x93\xf3X\xebnK[^\xad\xb9\xa2)N\xf5\x1dM\xf5\xdea\x13\xad\x22f\xcc\xb7\x92\x1b\x9a\xaa\xab\x9c\xbczp\xc9\x8a\x14S\xa6\xe9\x1f\xef4\x80\xb6\xc5\x06W\xbe-k\x8e\xe8\xe4\xd5C\xa2\x9a\x99v\xff\xedDV\x22\xb96\xbd!(\xee\x84\x06ec8t\x85\xb3\xc1w\xd3.\xda;\xfa\x0fF\x22}\xc4\xd4(1u\x9e\xbc\x8d*a\xd6\xba\xdb\xb2\xc6Ug\x83'\x18\x0e]\xb1\xa2\x1aC\xc0\x11\xc3\x08\x9b\x9c\xc6\x82\x9f7\xa5S\xd7\xc0qJ\xf2\xaby<\xc5alp\xef\xe0\x17m\x7f3%V\x81\xc3\x9d\x95\xf1\xff\xb9{\x8a\xae\x81w\x0c\xd4K\x82\x1cK\xbe\xc9IE-\x9e\x01\x9e0Q\x8b\xba\xc2\xf6\xda\x03\xb4x\xf6\x99\x86\xf9\xdcP\x8b'\x84\xf1\x96\xd4\xe2y`g2\xb5\x98\x8a\xdc\xfd\x1a\x10\x04,OPu\xc9F\xf6>~\x18\xa7\xbd(G\xe4\xee\x14'\xaf\x1eJG\xee\xc6\x80\x9a\x8c\xc8\xdd\x04'6\x09z}\x85\x15\xbd.K6v5\xbeLc\x85\x17\x87-\x7fY\x86\xc7\xd5(\xd7F?\xe1T\xdf\xd1E\x9dV`\x14h\xf5y\x03\x17\xb3\xba\x1f\x10N\xec\x05\xde\xb7\x8e\xc4\xfc\x05GEa\x03\x8f\xad\xda9\x7f\xc1Q\xdcd\xe08Sa\x9b\x91p/\xc1\xc9\x1e\xae\x8f\x9df4\xd2/J\xa5\x94\xca\xf8\xef\xf8\xbc\x81?\xa5\xd2\x97\xe9\x15S7\x90\x97\xaa\xfe?\xb8b*\xc0] 
\xae\x98\xf2k\xc4$5\x8f\xe7\xefE\x07\x19\x9d\x9a\xbfb\x8a)\xd3\x8b\xf0`\x89+\xa6\x96T;\x9f\xb1\x03\xc2\x89J\xe0/\x0b\x5cL\xda\x86\x96\x9bK\xbe\xf3\xc0\xb3V9\xbf,\x07\x84\x13\x05\x82\x9f\xff\xa5`\xc6\xbe\x0c\x09\x01\xc7\x80\xa3\xc9\xd5\xe6\xa1\x1dHp\xa4F\x90\xab?\xce\xb1\xf1o\x03G\xb2\xbd\xb1\x7f\x98\x9f\x1ax\x84#{DD\x96\xf3S\x830\xf0!p8\x11\x1e|%\x0e$8R,\x88\xd6\xa7\x80\xf5\x821[\xf8\xb1\x87\x9c0\x80\x87\xc5\x18x\x0b\xe8\x15\x85\xa1s\x01U.W\xa4\x5c\xe6\x80\xbf\xa3\xdd)\xfa\x86\xc3\x02\xa6(\xa2\xb2\xcc\xfa\xbc\x81\xb9\x5c\xad\xf9?\xb4\xddFJx\xea]\xf1\x00\x00\x00\x00IEND\xaeB`\x82"
qt_resource_name = "\x00\x12\x09\xeb\xb6g\x00i\x00c\x00o\x00n\x00_\x00i\x00n\x00s\x00t\x00a\x00l\x00l\x00e\x00d\x00.\x00p\x00n\x00g\x00\x16\x03\xa0mG\x00i\x00c\x00o\x00n\x00_\x00n\x00o\x00t\x00_\x00i\x00n\x00s\x00t\x00a\x00l\x00l\x00e\x00d\x00.\x00p\x00n\x00g\x00\x0e\x02\x0f\xe3\x87\x00i\x00c\x00o\x00n\x00_\x00a\x00b\x00o\x00u\x00t\x00.\x00p\x00n\x00g\x00\x17\x0b\xf7\xc0g\x00i\x00c\x00o\x00n\x00_\x00s\x00o\x00m\x00e\x00_\x00i\x00n\x00s\x00t\x00a\x00l\x00l\x00e\x00d\x00.\x00p\x00n\x00g\x00\x10\x08X\xa8#\x00q\x00t\x00_\x00r\x00e\x00s\x00o\x00u\x00r\x00c\x00e\x00s\x00.\x00q\x00r\x00c\x00\x0a\x03\x8f\xcf\x87\x00n\x00e\x00o\x00m\x00a\x00n\x00.\x00p\x00n\x00g"
qt_resource_struct = "\x00\x00\x00\x00\x00\x02\x00\x00\x00\x06\x00\x00\x00\x01\x00\x00\x00\x5c\x00\x00\x00\x00\x00\x01\x00\x00\x02\xa9\x00\x00\x00\xd8\x00\x00\x00\x00\x00\x01\x00\x00\x07O\x00\x00\x00*\x00\x00\x00\x00\x00\x01\x00\x00\x01\xd0\x00\x00\x00\xb2\x00\x00\x00\x00\x00\x01\x00\x00\x06j\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00~\x00\x00\x00\x00\x00\x01\x00\x00\x04\xb2"
def qInitResources():
    # Register the embedded resource bundle (icons + .qrc manifest defined in
    # qt_resource_data/name/struct above) so Qt can serve it via ":/..." paths.
    QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
    # Mirror of qInitResources(): unregister the same resource bundle.
    QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
# Auto-generated resource modules register themselves at import time.
qInitResources()
| 609.954545
| 11,866
| 0.751174
| 2,988
| 13,419
| 3.359103
| 0.270415
| 0.108797
| 0.084288
| 0.039454
| 0.208728
| 0.189897
| 0.187606
| 0.185514
| 0.172562
| 0.166185
| 0
| 0.260305
| 0.007452
| 13,419
| 21
| 11,867
| 639
| 0.49328
| 0.013637
| 0
| 0
| 0
| 0.333333
| 0.972634
| 0.969761
| 0
| 0
| 0.000605
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.111111
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
427515233b092e6e38d0dfa0c1214853088db505
| 9,605
|
py
|
Python
|
CliHTTPClient.py
|
pkamleshkumar12/cli-http-client
|
e72c6c299b79d16c176537074dda5674f97fb651
|
[
"Apache-2.0"
] | null | null | null |
CliHTTPClient.py
|
pkamleshkumar12/cli-http-client
|
e72c6c299b79d16c176537074dda5674f97fb651
|
[
"Apache-2.0"
] | null | null | null |
CliHTTPClient.py
|
pkamleshkumar12/cli-http-client
|
e72c6c299b79d16c176537074dda5674f97fb651
|
[
"Apache-2.0"
] | null | null | null |
from cement import App
from cement import App, Controller, ex
from App.Configuration import Configuration
from cement.utils.misc import init_defaults
from App.services.RequestStrategy import RequestStrategyBySOAP, Context, RequestStrategyByREST
class Base(Controller):
    """Base cement controller exposing ``get``, ``post`` and ``delete``
    HTTP-request commands.

    The three commands share the same option set (``get`` adds two
    file-related options) and an identical dispatch flow, so the argument
    lists are declared once as class-level constants and the common body
    lives in the private ``_dispatch`` helper.
    """

    class Meta:
        label = 'base'

    # Options shared by every command.
    _COMMON_ARGUMENTS = [
        (['-sn', '--systemName'],
         {'help': 'System Name',
          'action': 'store',
          'dest': 'systemName', }),
        (['-in', '--interfaceName'],
         {'help': 'Interface Name',
          'action': 'store',
          'dest': 'interfaceName', }),
        (['-vn', '--versionNumber'],
         {'help': 'Version Number',
          'action': 'store',
          'dest': 'versionNumber', }),
        (['-uc', '--useCase'],
         {'help': 'Use Case',
          'action': 'store',
          'dest': 'useCase', }),
        (['-env', '--environment'],
         {'help': 'Environment',
          'action': 'store',
          'dest': 'environment', }),
    ]
    # Extra options accepted only by the GET command.
    _FILE_ARGUMENTS = [
        (['--loadRequestFileFrom'],
         {'help': 'file location to load requests',
          'action': 'store',
          'dest': 'loadRequestFileFrom', }),
        (['--exportLogsTo'],
         {'help': 'export logs to give file name ',
          'action': 'store',
          'dest': 'exportLogsTo', }),
    ]
    # Web-service selector, always the last option.
    _WS_ARGUMENT = [
        (['-ws', '--webservice'],
         {'help': 'specify the type of web service, eg: SOAP, REST(default)',
          'action': 'store',
          'dest': 'webService', }),
    ]

    def _log_options(self, names):
        """Log each option in *names* that the user actually supplied."""
        for name in names:
            value = getattr(self.app.pargs, name, None)
            if value:
                self.app.log.info("Received option: %s => %s" % (name, value))

    def _dispatch(self, verb, with_files):
        """Build a Configuration from parsed args and run *verb*
        ('get'/'post'/'delete') through the strategy selected by the
        -ws option (REST is the default, SOAP when explicitly requested).

        :param verb: HTTP verb naming the Context method to invoke.
        :param with_files: include the GET-only file options in the
            Configuration; other verbs pass None for those two slots.
        """
        names = ['systemName', 'interfaceName', 'versionNumber',
                 'useCase', 'environment']
        if with_files:
            names += ['loadRequestFileFrom', 'exportLogsTo']
        names.append('webService')
        self._log_options(names)
        pargs = self.app.pargs
        config = Configuration(pargs.systemName,
                               pargs.interfaceName,
                               pargs.versionNumber,
                               pargs.useCase,
                               pargs.environment,
                               pargs.loadRequestFileFrom if with_files else None,
                               pargs.exportLogsTo if with_files else None,
                               pargs.webService)
        context = Context(RequestStrategyBySOAP(), config)
        if pargs.webService == "SOAP":
            self.app.log.info("Client: Strategy is set to SOAP web service")
        else:
            # REST is the default whenever -ws is absent or not "SOAP".
            self.app.log.info("Client: Strategy is set to REST web service")
            context.strategy = RequestStrategyByREST()
        getattr(context, 'do_%s_request' % verb)()

    @ex(
        help='GET Request Command',
        arguments=_COMMON_ARGUMENTS + _FILE_ARGUMENTS + _WS_ARGUMENT,
    )
    def get(self):
        """Issue a GET request with the configured strategy."""
        self.app.log.info('Inside get command!')
        self._dispatch('get', with_files=True)

    @ex(
        help='Post Request Command',
        arguments=_COMMON_ARGUMENTS + _WS_ARGUMENT,
    )
    def post(self):
        """Issue a POST request with the configured strategy."""
        self.app.log.info('Inside post command!')
        self._dispatch('post', with_files=False)

    @ex(
        help='Delete Request Command',
        arguments=_COMMON_ARGUMENTS + _WS_ARGUMENT,
    )
    def delete(self):
        """Issue a DELETE request with the configured strategy."""
        self.app.log.info('Inside delete command!')
        self._dispatch('delete', with_files=False)
class CliHTTPClient(App):
    """Cement App wiring the Base controller into a CLI HTTP client."""

    class Meta:
        # 'myapp' is also the config key used by init_defaults() below.
        label = 'myapp'
        handlers = [
            Base,
        ]
# Entry point: construct the cement app and run the requested command.
with CliHTTPClient() as app:
    defaults = init_defaults('myapp', 'log.logging')
    defaults['log.logging']['file'] = 'my.log'
    # NOTE(review): `defaults` is built but never handed to the App
    # (e.g. CliHTTPClient(Meta.config_defaults)) — confirm whether the
    # my.log setting is intended to take effect.
    app.run()
| 41.943231
| 112
| 0.510567
| 895
| 9,605
| 5.463687
| 0.106145
| 0.131697
| 0.154601
| 0.083027
| 0.849898
| 0.849898
| 0.803681
| 0.803681
| 0.803681
| 0.803681
| 0
| 0
| 0.342634
| 9,605
| 228
| 113
| 42.127193
| 0.774469
| 0
| 0
| 0.798122
| 0
| 0
| 0.254659
| 0.002186
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014085
| false
| 0
| 0.023474
| 0
| 0.056338
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
428b4ec38d713e539d5ca56a9888b52102f149f8
| 8,807
|
py
|
Python
|
Python_BMP/paramchecks.py
|
TechnoTanuki/Python_BMP
|
d6f7e7a4b74f7d6e8761d618c156d37c97726038
|
[
"MIT"
] | 3
|
2022-02-24T15:46:43.000Z
|
2022-03-30T13:17:03.000Z
|
Python_BMP/paramchecks.py
|
TechnoTanuki/Python_BMP
|
d6f7e7a4b74f7d6e8761d618c156d37c97726038
|
[
"MIT"
] | null | null | null |
Python_BMP/paramchecks.py
|
TechnoTanuki/Python_BMP
|
d6f7e7a4b74f7d6e8761d618c156d37c97726038
|
[
"MIT"
] | null | null | null |
"""
Function decorators for parameter check
-----------------------------------
| Copyright 2022 by Joel C. Alcarez |
| [joelalcarez1975@gmail.com] |
|-----------------------------------|
| We make absolutely no warranty |
| of any kind, expressed or implied |
|-----------------------------------|
| Contact primary author |
| if you plan to use this |
| in a commercial product at |
| joelalcarez1975@gmail.com |
-----------------------------------
"""
from functools import wraps

from .messages import sysmsg
from .bmpconstants import(
bmpcolorbits, bmpx, bmpy)
from .inttools import readint
from .primitives2D import(
entirecircleisinboundary)
def f(bmp, x, y):
    """Return True when point (x, y) lies inside bmp's width/height bounds.

    Replaces a lambda assigned to a name (PEP 8 E731) with a def; the
    short name is kept because every decorator below calls it as ``f``.
    """
    return (-1 < x < readint(bmpx, 4, bmp)
            and -1 < y < readint(bmpy, 4, bmp))
def intcircleparam(func):
    """Decorator: require the 2nd-4th args (x, y, r) of a
    circle-rendering function to be ints; on violation print an
    error message and return None instead of calling *func*.

    Args:
        function(bmp:array,x,y,r....)

    Returns:
        caller function
    """
    @wraps(func)  # preserve func's __name__/__doc__ on the wrapper
    def callf(*args, **kwargs):
        # isinstance instead of type(...) == int: accepts int subclasses.
        if all(isinstance(n, int) for n in args[1:4]):
            return func(*args, **kwargs)
        print(sysmsg['inttypereq'])
    return callf
def intcircleparam24bitonly(func):
    """Decorator: as intcircleparam, but additionally restrict the
    bitmap (1st arg) to 24-bit / RGB; on violation print an error
    message and return None instead of calling *func*.

    Args:
        function(bmp:array,x,y,r....)

    Returns:
        caller function
    """
    @wraps(func)  # preserve func's __name__/__doc__ on the wrapper
    def callf(*args, **kwargs):
        if args[0][bmpcolorbits] != 24:
            print(sysmsg['not24bit'])
        elif all(isinstance(n, int) for n in args[1:4]):
            return func(*args, **kwargs)
        else:
            print(sysmsg['inttypereq'])
    return callf
def func24bitonly(func):
    """Decorator: restrict use of *func* to 24-bit / RGB bitmaps
    (1st arg); otherwise print an error and return None.

    Args:
        function(bmp:array,...)

    Returns:
        caller function
    """
    @wraps(func)  # preserve func's __name__/__doc__ on the wrapper
    def callf(*args, **kwargs):
        if args[0][bmpcolorbits] != 24:
            print(sysmsg['not24bit'])
        else:
            return func(*args, **kwargs)
    return callf
def func24bitonlyandentirerectinboundary(func):
    """Decorator: restrict *func* to 24-bit / RGB bitmaps (1st arg) and
    require args 2-5 to be int x/y corner coordinates of a rectangle
    lying inside the bitmap; on violation print an error and return None.

    Args:
        function(bmp:array, x1, y1, x2, y2, ...)

    Returns:
        caller function
    """
    @wraps(func)  # preserve func's __name__/__doc__ on the wrapper
    def callf(*args, **kwargs):
        bmp, x1, y1, x2, y2 = args[:5]
        if bmp[bmpcolorbits] != 24:
            print(sysmsg['not24bit'])
        elif not all(isinstance(n, int) for n in (x1, y1, x2, y2)):
            print(sysmsg['inttypereq'])
        elif f(bmp, x1, y1) and f(bmp, x2, y2):
            return func(*args, **kwargs)
        else:
            print(sysmsg['regionoutofbounds'])
    return callf
def func24bitonlyandentirecircleinboundary(func):
    """Decorator: restrict *func* to 24-bit bitmaps (1st arg) and
    require args 2-4 to be an int circle centre (x, y) and radius r
    lying fully inside the bitmap; on violation print an error and
    return None.

    Args:
        function(bmp:array,x:int,y:int,r:int...)

    Returns:
        caller function
    """
    @wraps(func)  # preserve func's __name__/__doc__ on the wrapper
    def callf(*args, **kwargs):
        bmp, x, y, r = args[:4]
        if bmp[bmpcolorbits] != 24:
            print(sysmsg['not24bit'])
        elif not all(isinstance(n, int) for n in (x, y, r)):
            print(sysmsg['inttypereq'])
        elif entirecircleisinboundary(
                x, y, -1, readint(bmpx, 4, bmp),
                -1, readint(bmpy, 4, bmp), r):
            return func(*args, **kwargs)
        else:
            print(sysmsg['regionoutofbounds'])
    return callf
def func8and24bitonlyandentirecircleinboundary(func):
    """Decorator: restrict *func* to 24-bit or 8-bit bitmaps (1st arg)
    and require args 2-4 to be an int circle centre (x, y) and radius r
    lying fully inside the bitmap; on violation print an error and
    return None.

    Args:
        function(bmp:array,x:int,y:int,r:int...)

    Returns:
        caller function
    """
    @wraps(func)  # preserve func's __name__/__doc__ on the wrapper
    def callf(*args, **kwargs):
        bmp, x, y, r = args[:4]
        if bmp[bmpcolorbits] not in (24, 8):
            print(sysmsg['not24or8bit'])
        elif not all(isinstance(n, int) for n in (x, y, r)):
            print(sysmsg['inttypereq'])
        elif entirecircleisinboundary(
                x, y, -1, readint(bmpx, 4, bmp),
                -1, readint(bmpy, 4, bmp), r):
            return func(*args, **kwargs)
        else:
            print(sysmsg['regionoutofbounds'])
    return callf
def func8and24bitonly(func):
    """Decorator: restrict use of *func* to 24-bit or 8-bit bitmaps
    (1st arg); otherwise print an error and return None.

    Args:
        function(bmp:array,...)

    Returns:
        caller function
    """
    @wraps(func)  # preserve func's __name__/__doc__ on the wrapper
    def callf(*args, **kwargs):
        if args[0][bmpcolorbits] not in (24, 8):
            print(sysmsg['not24or8bit'])
        else:
            return func(*args, **kwargs)
    return callf
def func8and24bitonlyandentirerectinboundary(func):
    """Decorator: restrict *func* to 24-bit or 8-bit bitmaps (1st arg)
    and require args 2-5 to be int x/y corner coordinates of a rectangle
    lying inside the bitmap; on violation print an error and return None.

    Args:
        function(bmp:array, x1, y1, x2, y2, ...)

    Returns:
        caller function
    """
    @wraps(func)  # preserve func's __name__/__doc__ on the wrapper
    def callf(*args, **kwargs):
        bmp, x1, y1, x2, y2 = args[:5]
        if bmp[bmpcolorbits] not in (24, 8):
            print(sysmsg['not24or8bit'])
        elif not all(isinstance(n, int) for n in (x1, y1, x2, y2)):
            print(sysmsg['inttypereq'])
        elif f(bmp, x1, y1) and f(bmp, x2, y2):
            return func(*args, **kwargs)
        else:
            print(sysmsg['regionoutofbounds'])
    return callf
def entirerectinboundary(func):
    """Decorator: require args 2-5 to be int x/y corner coordinates of a
    rectangle lying inside the bitmap (1st arg); on violation print an
    error and return None.

    Args:
        function(bmp:array, x1, y1, x2, y2, ...)

    Returns:
        caller function
    """
    @wraps(func)  # preserve func's __name__/__doc__ on the wrapper
    def callf(*args, **kwargs):
        bmp, x1, y1, x2, y2 = args[:5]
        if not all(isinstance(n, int) for n in (x1, y1, x2, y2)):
            print(sysmsg['inttypereq'])
        elif f(bmp, x1, y1) and f(bmp, x2, y2):
            return func(*args, **kwargs)
        else:
            print(sysmsg['regionoutofbounds'])
    return callf
def entirecircleinboundary(func):
    """Decorator: require args 2-4 to be an int circle centerpoint
    (x, y) and radius r lying fully inside the bitmap (1st arg); on
    violation print an error and return None.

    Bug fix: the vertical bound previously reused the bitmap *width*
    (``readint(bmpx, ...)``) for both axes; it now uses ``bmpy`` for
    the height, matching every sibling decorator in this module.

    Args:
        function(bmp:array,x:int,y:int,r:int...)

    Returns:
        caller function
    """
    @wraps(func)  # preserve func's __name__/__doc__ on the wrapper
    def callf(*args, **kwargs):
        bmp, x, y, r = args[:4]
        if not all(isinstance(n, int) for n in (x, y, r)):
            print(sysmsg['inttypereq'])
        elif entirecircleisinboundary(
                x, y, -1, readint(bmpx, 4, bmp),
                -1, readint(bmpy, 4, bmp), r):  # bmpy (height) — was bmpx
            return func(*args, **kwargs)
        else:
            print(sysmsg['regionoutofbounds'])
    return callf
| 26.687879
| 56
| 0.512547
| 1,011
| 8,807
| 4.464886
| 0.129575
| 0.051174
| 0.042091
| 0.053168
| 0.833629
| 0.821887
| 0.821887
| 0.821001
| 0.789765
| 0.769606
| 0
| 0.036044
| 0.360509
| 8,807
| 329
| 57
| 26.768997
| 0.765447
| 0.354945
| 0
| 0.826667
| 0
| 0
| 0.047637
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.133333
| false
| 0
| 0.026667
| 0
| 0.16
| 0.14
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
429e46c5bff23e15450505a9d317c242f6138008
| 227
|
py
|
Python
|
scenes/stage/screen_infinite.py
|
TheLokin/Kabalayn
|
2034364e03e8eca909df11dcc393d70edd18493b
|
[
"MIT"
] | null | null | null |
scenes/stage/screen_infinite.py
|
TheLokin/Kabalayn
|
2034364e03e8eca909df11dcc393d70edd18493b
|
[
"MIT"
] | null | null | null |
scenes/stage/screen_infinite.py
|
TheLokin/Kabalayn
|
2034364e03e8eca909df11dcc393d70edd18493b
|
[
"MIT"
] | null | null | null |
from .screen_play import ScreenPlay
class ScreenInfinite(ScreenPlay):
    """Endless play mode: behaves exactly like ScreenPlay but never ends
    the stage on score.

    The redundant __init__ override (it only forwarded its arguments to
    ScreenPlay unchanged) has been removed; the inherited constructor has
    the same signature and effect.
    """

    def check_score(self, scene):
        # Intentional no-op: infinite mode has no score-based end condition.
        pass
| 28.375
| 52
| 0.700441
| 27
| 227
| 5.444444
| 0.666667
| 0.176871
| 0.217687
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.207048
| 227
| 8
| 53
| 28.375
| 0.816667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.166667
| 0.166667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
c44aace96f9352fe7554971c809c1b8cc94d6b80
| 207
|
py
|
Python
|
clickable/virtualenv/__init__.py
|
lalmeras/clickable
|
6182f8a106c202a9bb1e6d7142e2b5b4734c13f3
|
[
"BSD-3-Clause"
] | null | null | null |
clickable/virtualenv/__init__.py
|
lalmeras/clickable
|
6182f8a106c202a9bb1e6d7142e2b5b4734c13f3
|
[
"BSD-3-Clause"
] | 297
|
2017-09-29T23:51:42.000Z
|
2021-08-31T09:27:17.000Z
|
clickable/virtualenv/__init__.py
|
lalmeras/clickable
|
6182f8a106c202a9bb1e6d7142e2b5b4734c13f3
|
[
"BSD-3-Clause"
] | null | null | null |
from .virtualenv import _virtualenv
from .virtualenv import _pip_packages
def virtualenv(path_resolver, virtualenv):
    """Provision the virtualenv itself, then install its pip packages.

    NOTE(review): the ``virtualenv`` parameter shadows this function's
    name and the imported submodule; renaming it would break
    keyword-argument callers, so it is left as-is.
    """
    _virtualenv(path_resolver, virtualenv)
    _pip_packages(path_resolver, virtualenv)
| 25.875
| 44
| 0.821256
| 23
| 207
| 7
| 0.347826
| 0.223602
| 0.409938
| 0.397516
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.120773
| 207
| 7
| 45
| 29.571429
| 0.884615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
676432ef1a16ffed6b0d877369442d35b5572e7c
| 115,891
|
py
|
Python
|
napalm_yang/models/openconfig/network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/__init__.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 64
|
2016-10-20T15:47:18.000Z
|
2021-11-11T11:57:32.000Z
|
napalm_yang/models/openconfig/network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/__init__.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 126
|
2016-10-05T10:36:14.000Z
|
2019-05-15T08:43:23.000Z
|
napalm_yang/models/openconfig/network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/__init__.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 63
|
2016-11-07T15:23:08.000Z
|
2021-09-22T14:41:16.000Z
|
# -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
    # Python 3: expose the PY2 names the generated code below relies on.
    import builtins as __builtin__

    long = int  # PY2 `long` is plain `int` on PY3
elif six.PY2:
    import __builtin__
class state(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/policy-forwarding/policies/policy/rules/rule/ip/state. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.

    YANG Description: State information
    """

    # Fixed attribute set: keeps instances small and prevents typo'd
    # attributes.  The double-underscore entries are the name-mangled
    # per-leaf value holders assigned in __init__.
    __slots__ = (
        "_path_helper",
        "_extmethods",
        "__ip_version",
        "__source_ip_address",
        "__source_ip_flow_label",
        "__destination_ip_address",
        "__destination_ip_flow_label",
        "__dscp",
        "__protocol",
        "__hop_limit",
    )

    # Metadata consumed by the pyangbind runtime.
    _yang_name = "state"
    _pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
    """Construct every leaf of the container in its unset state.

    Accepts at most one positional argument: an object exposing the same
    pyangbind elements, whose changed values are copied in (see the tail
    of this method).  The keyword ``load`` is forwarded to the setters.
    """
    self._path_helper = False
    self._extmethods = False
    # ip-version: enumeration leaf (unknown / ipv4 / ipv6).
    self.__ip_version = YANGDynClass(
        base=RestrictedClassType(
            base_type=six.text_type,
            restriction_type="dict_key",
            restriction_arg={
                "unknown": {"value": 0}, "ipv4": {"value": 1}, "ipv6": {"value": 2}
            },
        ),
        is_leaf=True,
        yang_name="ip-version",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="inet:ip-version",
        is_config=False,
    )
    # source-ip-address: union type — IPv4 prefix or IPv6 prefix pattern.
    self.__source_ip_address = YANGDynClass(
        base=[
            RestrictedClassType(
                base_type=six.text_type,
                restriction_dict={
                    "pattern": "(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])/(([0-9])|([1-2][0-9])|(3[0-2]))"
                },
            ),
            RestrictedClassType(
                base_type=six.text_type,
                restriction_dict={
                    "pattern": "((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(/(([0-9])|([0-9]{2})|(1[0-1][0-9])|(12[0-8])))"
                },
            ),
        ],
        is_leaf=True,
        yang_name="source-ip-address",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="inet:ip-prefix",
        is_config=False,
    )
    # source-ip-flow-label: uint32 further restricted to 0..1048575 (20 bits).
    self.__source_ip_flow_label = YANGDynClass(
        base=RestrictedClassType(
            base_type=RestrictedClassType(
                base_type=long,
                restriction_dict={"range": ["0..4294967295"]},
                int_size=32,
            ),
            restriction_dict={"range": ["0..1048575"]},
        ),
        is_leaf=True,
        yang_name="source-ip-flow-label",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="inet:ipv6-flow-label",
        is_config=False,
    )
    # destination-ip-address: same union of IPv4/IPv6 prefix patterns.
    self.__destination_ip_address = YANGDynClass(
        base=[
            RestrictedClassType(
                base_type=six.text_type,
                restriction_dict={
                    "pattern": "(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])/(([0-9])|([1-2][0-9])|(3[0-2]))"
                },
            ),
            RestrictedClassType(
                base_type=six.text_type,
                restriction_dict={
                    "pattern": "((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(/(([0-9])|([0-9]{2})|(1[0-1][0-9])|(12[0-8])))"
                },
            ),
        ],
        is_leaf=True,
        yang_name="destination-ip-address",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="inet:ip-prefix",
        is_config=False,
    )
    # destination-ip-flow-label: same 20-bit flow-label restriction.
    self.__destination_ip_flow_label = YANGDynClass(
        base=RestrictedClassType(
            base_type=RestrictedClassType(
                base_type=long,
                restriction_dict={"range": ["0..4294967295"]},
                int_size=32,
            ),
            restriction_dict={"range": ["0..1048575"]},
        ),
        is_leaf=True,
        yang_name="destination-ip-flow-label",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="inet:ipv6-flow-label",
        is_config=False,
    )
    # dscp: uint8 restricted to the 6-bit DSCP range 0..63.
    self.__dscp = YANGDynClass(
        base=RestrictedClassType(
            base_type=RestrictedClassType(
                base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
            ),
            restriction_dict={"range": ["0..63"]},
        ),
        is_leaf=True,
        yang_name="dscp",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="inet:dscp",
        is_config=False,
    )
    # protocol: union of a numeric protocol (0..254) and named identities
    # from openconfig-packet-match-types (each accepted bare or prefixed).
    self.__protocol = YANGDynClass(
        base=[
            RestrictedClassType(
                base_type=RestrictedClassType(
                    base_type=int,
                    restriction_dict={"range": ["0..255"]},
                    int_size=8,
                ),
                restriction_dict={"range": ["0..254"]},
            ),
            RestrictedClassType(
                base_type=six.text_type,
                restriction_type="dict_key",
                restriction_arg={
                    "IP_TCP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_TCP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "IP_UDP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_UDP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "IP_ICMP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_ICMP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "IP_IGMP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_IGMP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "IP_PIM": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_PIM": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "IP_RSVP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_RSVP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "IP_GRE": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_GRE": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "IP_AUTH": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_AUTH": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "IP_L2TP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_L2TP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                },
            ),
        ],
        is_leaf=True,
        yang_name="protocol",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="oc-pkt-match-types:ip-protocol-type",
        is_config=False,
    )
    # hop-limit: plain uint8 (outer range restriction is the identity 0..255).
    self.__hop_limit = YANGDynClass(
        base=RestrictedClassType(
            base_type=RestrictedClassType(
                base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
            ),
            restriction_dict={"range": ["0..255"]},
        ),
        is_leaf=True,
        yang_name="hop-limit",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="uint8",
        is_config=False,
    )
    # Copy-constructor path: mirror the changed elements of a compatible
    # source object onto this instance via the generated _set_* methods.
    load = kwargs.pop("load", None)
    if args:
        if len(args) > 1:
            raise TypeError("cannot create a YANG container with >1 argument")
        all_attr = True
        for e in self._pyangbind_elements:
            if not hasattr(args[0], e):
                all_attr = False
                break
        if not all_attr:
            raise ValueError("Supplied object did not have the correct attributes")
        for e in self._pyangbind_elements:
            nobj = getattr(args[0], e)
            # Skip leaves the source object never set.
            if nobj._changed() is False:
                continue
            setmethod = getattr(self, "_set_%s" % e)
            if load is None:
                setmethod(getattr(args[0], e))
            else:
                setmethod(getattr(args[0], e), load=load)
def _path(self):
    """Return the YANG path of this container as a list of segments.

    A node attached to a parent reports the parent's path extended with
    its own YANG name; a detached node reports the absolute schema path.
    """
    if not hasattr(self, "_parent"):
        # Detached: fall back to the absolute, schema-defined location.
        return [
            "network-instances",
            "network-instance",
            "policy-forwarding",
            "policies",
            "policy",
            "rules",
            "rule",
            "ip",
            "state",
        ]
    return self._parent._path() + [self._yang_name]
def _get_ip_version(self):
    """
    Getter method for ip_version, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/ip_version (inet:ip-version)

    YANG Description: IP version of the header.
    """
    # Plain accessor for the YANGDynClass leaf built in __init__.
    return self.__ip_version
def _set_ip_version(self, v, load=False):
    """
    Setter method for ip_version, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/ip_version (inet:ip-version)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_ip_version is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_ip_version() directly.

    YANG Description: IP version of the header.
    """
    # Unwrap an already-typed value back to its native representation.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Re-wrap with the leaf's constraints; invalid values raise here.
        t = YANGDynClass(
            v,
            base=RestrictedClassType(
                base_type=six.text_type,
                restriction_type="dict_key",
                restriction_arg={
                    "unknown": {"value": 0},
                    "ipv4": {"value": 1},
                    "ipv6": {"value": 2},
                },
            ),
            is_leaf=True,
            yang_name="ip-version",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="inet:ip-version",
            is_config=False,
        )
    except (TypeError, ValueError):
        # Surface a structured error describing the expected type.
        raise ValueError(
            {
                "error-string": """ip_version must be of a type compatible with inet:ip-version""",
                "defined-type": "inet:ip-version",
                "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'unknown': {'value': 0}, 'ipv4': {'value': 1}, 'ipv6': {'value': 2}},), is_leaf=True, yang_name="ip-version", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='inet:ip-version', is_config=False)""",
            }
        )
    self.__ip_version = t
    if hasattr(self, "_set"):
        self._set()
def _unset_ip_version(self):
    # Reset the leaf to a fresh, unset instance with the same constraints
    # used in __init__.
    self.__ip_version = YANGDynClass(
        base=RestrictedClassType(
            base_type=six.text_type,
            restriction_type="dict_key",
            restriction_arg={
                "unknown": {"value": 0}, "ipv4": {"value": 1}, "ipv6": {"value": 2}
            },
        ),
        is_leaf=True,
        yang_name="ip-version",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="inet:ip-version",
        is_config=False,
    )
def _get_source_ip_address(self):
    """
    Getter method for source_ip_address, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/source_ip_address (inet:ip-prefix)

    YANG Description: Destination IP address prefix.
    """
    # NOTE(review): the YANG description above says "Destination" for the
    # source leaf — presumably a copy/paste in the upstream model; the
    # generator reproduces it verbatim, so it is kept unchanged here.
    return self.__source_ip_address
def _set_source_ip_address(self, v, load=False):
    """
    Setter method for source_ip_address, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/source_ip_address (inet:ip-prefix)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_source_ip_address is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_source_ip_address() directly.

    YANG Description: Destination IP address prefix.
    """
    # Unwrap an already-typed value back to its native representation.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate against the IPv4-prefix / IPv6-prefix union.
        t = YANGDynClass(
            v,
            base=[
                RestrictedClassType(
                    base_type=six.text_type,
                    restriction_dict={
                        "pattern": "(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])/(([0-9])|([1-2][0-9])|(3[0-2]))"
                    },
                ),
                RestrictedClassType(
                    base_type=six.text_type,
                    restriction_dict={
                        "pattern": "((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(/(([0-9])|([0-9]{2})|(1[0-1][0-9])|(12[0-8])))"
                    },
                ),
            ],
            is_leaf=True,
            yang_name="source-ip-address",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="inet:ip-prefix",
            is_config=False,
        )
    except (TypeError, ValueError):
        raise ValueError(
            {
                "error-string": """source_ip_address must be of a type compatible with inet:ip-prefix""",
                "defined-type": "inet:ip-prefix",
                "generated-type": """YANGDynClass(base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])/(([0-9])|([1-2][0-9])|(3[0-2]))'}),RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(/(([0-9])|([0-9]{2})|(1[0-1][0-9])|(12[0-8])))'}),], is_leaf=True, yang_name="source-ip-address", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='inet:ip-prefix', is_config=False)""",
            }
        )
    self.__source_ip_address = t
    if hasattr(self, "_set"):
        self._set()
def _unset_source_ip_address(self):
    # Reset the leaf to a fresh, unset instance with the same constraints
    # used in __init__.
    self.__source_ip_address = YANGDynClass(
        base=[
            RestrictedClassType(
                base_type=six.text_type,
                restriction_dict={
                    "pattern": "(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])/(([0-9])|([1-2][0-9])|(3[0-2]))"
                },
            ),
            RestrictedClassType(
                base_type=six.text_type,
                restriction_dict={
                    "pattern": "((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(/(([0-9])|([0-9]{2})|(1[0-1][0-9])|(12[0-8])))"
                },
            ),
        ],
        is_leaf=True,
        yang_name="source-ip-address",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="inet:ip-prefix",
        is_config=False,
    )
def _get_source_ip_flow_label(self):
    """
    Getter method for source_ip_flow_label, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/source_ip_flow_label (inet:ipv6-flow-label)

    YANG Description: Source IPv6 Flow label.
    """
    # Plain accessor for the YANGDynClass leaf built in __init__.
    return self.__source_ip_flow_label
def _set_source_ip_flow_label(self, v, load=False):
    """
    Setter method for source_ip_flow_label, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/source_ip_flow_label (inet:ipv6-flow-label)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_source_ip_flow_label is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_source_ip_flow_label() directly.

    YANG Description: Source IPv6 Flow label.
    """
    # Unwrap an already-typed value back to its native representation.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate against the 20-bit flow-label range (0..1048575).
        t = YANGDynClass(
            v,
            base=RestrictedClassType(
                base_type=RestrictedClassType(
                    base_type=long,
                    restriction_dict={"range": ["0..4294967295"]},
                    int_size=32,
                ),
                restriction_dict={"range": ["0..1048575"]},
            ),
            is_leaf=True,
            yang_name="source-ip-flow-label",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="inet:ipv6-flow-label",
            is_config=False,
        )
    except (TypeError, ValueError):
        raise ValueError(
            {
                "error-string": """source_ip_flow_label must be of a type compatible with inet:ipv6-flow-label""",
                "defined-type": "inet:ipv6-flow-label",
                "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': ['0..1048575']}), is_leaf=True, yang_name="source-ip-flow-label", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='inet:ipv6-flow-label', is_config=False)""",
            }
        )
    self.__source_ip_flow_label = t
    if hasattr(self, "_set"):
        self._set()
def _unset_source_ip_flow_label(self):
    # Reset the leaf to a fresh, unset instance with the same constraints
    # used in __init__.
    self.__source_ip_flow_label = YANGDynClass(
        base=RestrictedClassType(
            base_type=RestrictedClassType(
                base_type=long,
                restriction_dict={"range": ["0..4294967295"]},
                int_size=32,
            ),
            restriction_dict={"range": ["0..1048575"]},
        ),
        is_leaf=True,
        yang_name="source-ip-flow-label",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="inet:ipv6-flow-label",
        is_config=False,
    )
def _get_destination_ip_address(self):
    """
    Getter method for destination_ip_address, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/destination_ip_address (inet:ip-prefix)

    YANG Description: Destination IP address prefix.
    """
    # Plain accessor for the YANGDynClass leaf built in __init__.
    return self.__destination_ip_address
def _set_destination_ip_address(self, v, load=False):
    """
    Setter method for destination_ip_address, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/destination_ip_address (inet:ip-prefix)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_destination_ip_address is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_destination_ip_address() directly.

    YANG Description: Destination IP address prefix.
    """
    # Unwrap an already-typed value back to its native representation.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate against the IPv4-prefix / IPv6-prefix union.
        t = YANGDynClass(
            v,
            base=[
                RestrictedClassType(
                    base_type=six.text_type,
                    restriction_dict={
                        "pattern": "(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])/(([0-9])|([1-2][0-9])|(3[0-2]))"
                    },
                ),
                RestrictedClassType(
                    base_type=six.text_type,
                    restriction_dict={
                        "pattern": "((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(/(([0-9])|([0-9]{2})|(1[0-1][0-9])|(12[0-8])))"
                    },
                ),
            ],
            is_leaf=True,
            yang_name="destination-ip-address",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="inet:ip-prefix",
            is_config=False,
        )
    except (TypeError, ValueError):
        raise ValueError(
            {
                "error-string": """destination_ip_address must be of a type compatible with inet:ip-prefix""",
                "defined-type": "inet:ip-prefix",
                "generated-type": """YANGDynClass(base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])/(([0-9])|([1-2][0-9])|(3[0-2]))'}),RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(/(([0-9])|([0-9]{2})|(1[0-1][0-9])|(12[0-8])))'}),], is_leaf=True, yang_name="destination-ip-address", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='inet:ip-prefix', is_config=False)""",
            }
        )
    self.__destination_ip_address = t
    if hasattr(self, "_set"):
        self._set()
def _unset_destination_ip_address(self):
    # Reset the leaf to a fresh, unset instance with the same constraints
    # used in __init__.
    self.__destination_ip_address = YANGDynClass(
        base=[
            RestrictedClassType(
                base_type=six.text_type,
                restriction_dict={
                    "pattern": "(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])/(([0-9])|([1-2][0-9])|(3[0-2]))"
                },
            ),
            RestrictedClassType(
                base_type=six.text_type,
                restriction_dict={
                    "pattern": "((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(/(([0-9])|([0-9]{2})|(1[0-1][0-9])|(12[0-8])))"
                },
            ),
        ],
        is_leaf=True,
        yang_name="destination-ip-address",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="inet:ip-prefix",
        is_config=False,
    )
def _get_destination_ip_flow_label(self):
    """
    Getter method for destination_ip_flow_label, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/destination_ip_flow_label (inet:ipv6-flow-label)

    YANG Description: Destination IPv6 Flow label.
    """
    # Plain accessor for the YANGDynClass leaf built in __init__.
    return self.__destination_ip_flow_label
def _set_destination_ip_flow_label(self, v, load=False):
    """
    Setter method for destination_ip_flow_label, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/destination_ip_flow_label (inet:ipv6-flow-label)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_destination_ip_flow_label is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_destination_ip_flow_label() directly.

    YANG Description: Destination IPv6 Flow label.
    """
    # Unwrap an already-typed value back to its native representation.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate against the 20-bit flow-label range (0..1048575).
        t = YANGDynClass(
            v,
            base=RestrictedClassType(
                base_type=RestrictedClassType(
                    base_type=long,
                    restriction_dict={"range": ["0..4294967295"]},
                    int_size=32,
                ),
                restriction_dict={"range": ["0..1048575"]},
            ),
            is_leaf=True,
            yang_name="destination-ip-flow-label",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="inet:ipv6-flow-label",
            is_config=False,
        )
    except (TypeError, ValueError):
        raise ValueError(
            {
                "error-string": """destination_ip_flow_label must be of a type compatible with inet:ipv6-flow-label""",
                "defined-type": "inet:ipv6-flow-label",
                "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': ['0..1048575']}), is_leaf=True, yang_name="destination-ip-flow-label", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='inet:ipv6-flow-label', is_config=False)""",
            }
        )
    self.__destination_ip_flow_label = t
    if hasattr(self, "_set"):
        self._set()
def _unset_destination_ip_flow_label(self):
    # Reset the leaf to a fresh, unset instance with the same constraints
    # used in __init__.
    self.__destination_ip_flow_label = YANGDynClass(
        base=RestrictedClassType(
            base_type=RestrictedClassType(
                base_type=long,
                restriction_dict={"range": ["0..4294967295"]},
                int_size=32,
            ),
            restriction_dict={"range": ["0..1048575"]},
        ),
        is_leaf=True,
        yang_name="destination-ip-flow-label",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="inet:ipv6-flow-label",
        is_config=False,
    )
def _get_dscp(self):
    """
    Getter method for dscp, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/dscp (inet:dscp)

    YANG Description: Value of diffserv codepoint.
    """
    # Plain accessor for the YANGDynClass leaf built in __init__.
    return self.__dscp
def _set_dscp(self, v, load=False):
    """
    Setter method for dscp, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/dscp (inet:dscp)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_dscp is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_dscp() directly.

    YANG Description: Value of diffserv codepoint.
    """
    # Unwrap an already-typed value back to its native representation.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate against the 6-bit DSCP range (0..63).
        t = YANGDynClass(
            v,
            base=RestrictedClassType(
                base_type=RestrictedClassType(
                    base_type=int,
                    restriction_dict={"range": ["0..255"]},
                    int_size=8,
                ),
                restriction_dict={"range": ["0..63"]},
            ),
            is_leaf=True,
            yang_name="dscp",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="inet:dscp",
            is_config=False,
        )
    except (TypeError, ValueError):
        raise ValueError(
            {
                "error-string": """dscp must be of a type compatible with inet:dscp""",
                "defined-type": "inet:dscp",
                "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['0..63']}), is_leaf=True, yang_name="dscp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='inet:dscp', is_config=False)""",
            }
        )
    self.__dscp = t
    if hasattr(self, "_set"):
        self._set()
def _unset_dscp(self):
    # Reset the leaf to a fresh, unset instance with the same constraints
    # used in __init__.
    self.__dscp = YANGDynClass(
        base=RestrictedClassType(
            base_type=RestrictedClassType(
                base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
            ),
            restriction_dict={"range": ["0..63"]},
        ),
        is_leaf=True,
        yang_name="dscp",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="inet:dscp",
        is_config=False,
    )
def _get_protocol(self):
    """
    Getter method for protocol, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/protocol (oc-pkt-match-types:ip-protocol-type)

    YANG Description: Internet Protocol number.
    """
    # Plain accessor for the YANGDynClass leaf built in __init__.
    return self.__protocol
def _set_protocol(self, v, load=False):
    """
    Setter method for protocol, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/protocol (oc-pkt-match-types:ip-protocol-type)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_protocol is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_protocol() directly.

    YANG Description: Internet Protocol number.
    """
    # Unwrap an already-typed value back to its native representation.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate against the union: numeric protocol 0..254 or a named
        # identity from openconfig-packet-match-types (bare or prefixed).
        t = YANGDynClass(
            v,
            base=[
                RestrictedClassType(
                    base_type=RestrictedClassType(
                        base_type=int,
                        restriction_dict={"range": ["0..255"]},
                        int_size=8,
                    ),
                    restriction_dict={"range": ["0..254"]},
                ),
                RestrictedClassType(
                    base_type=six.text_type,
                    restriction_type="dict_key",
                    restriction_arg={
                        "IP_TCP": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "oc-pkt-match-types:IP_TCP": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "IP_UDP": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "oc-pkt-match-types:IP_UDP": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "IP_ICMP": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "oc-pkt-match-types:IP_ICMP": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "IP_IGMP": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "oc-pkt-match-types:IP_IGMP": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "IP_PIM": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "oc-pkt-match-types:IP_PIM": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "IP_RSVP": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "oc-pkt-match-types:IP_RSVP": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "IP_GRE": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "oc-pkt-match-types:IP_GRE": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "IP_AUTH": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "oc-pkt-match-types:IP_AUTH": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "IP_L2TP": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "oc-pkt-match-types:IP_L2TP": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                    },
                ),
            ],
            is_leaf=True,
            yang_name="protocol",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="oc-pkt-match-types:ip-protocol-type",
            is_config=False,
        )
    except (TypeError, ValueError):
        raise ValueError(
            {
                "error-string": """protocol must be of a type compatible with oc-pkt-match-types:ip-protocol-type""",
                "defined-type": "oc-pkt-match-types:ip-protocol-type",
                "generated-type": """YANGDynClass(base=[RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['0..254']}),RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'IP_TCP': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'oc-pkt-match-types:IP_TCP': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'IP_UDP': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'oc-pkt-match-types:IP_UDP': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'IP_ICMP': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'oc-pkt-match-types:IP_ICMP': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'IP_IGMP': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'oc-pkt-match-types:IP_IGMP': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'IP_PIM': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'oc-pkt-match-types:IP_PIM': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'IP_RSVP': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'oc-pkt-match-types:IP_RSVP': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'IP_GRE': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'oc-pkt-match-types:IP_GRE': {'@module': 
'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'IP_AUTH': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'oc-pkt-match-types:IP_AUTH': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'IP_L2TP': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'oc-pkt-match-types:IP_L2TP': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}},),], is_leaf=True, yang_name="protocol", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-pkt-match-types:ip-protocol-type', is_config=False)""",
            }
        )
    self.__protocol = t
    if hasattr(self, "_set"):
        self._set()
def _unset_protocol(self):
    # Restore 'protocol' to a fresh, unset leaf instance.  The leaf type is a
    # YANG union: either a uint8 restricted to 0..254, or an identityref to an
    # ip-protocol-type identity (IP_TCP, IP_UDP, ...), each accepted both
    # unprefixed and with the 'oc-pkt-match-types:' module prefix.
    self.__protocol = YANGDynClass(
        base=[
            RestrictedClassType(
                base_type=RestrictedClassType(
                    base_type=int,
                    restriction_dict={"range": ["0..255"]},
                    int_size=8,
                ),
                restriction_dict={"range": ["0..254"]},
            ),
            RestrictedClassType(
                base_type=six.text_type,
                restriction_type="dict_key",
                restriction_arg={
                    "IP_TCP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_TCP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "IP_UDP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_UDP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "IP_ICMP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_ICMP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "IP_IGMP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_IGMP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "IP_PIM": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_PIM": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "IP_RSVP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_RSVP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "IP_GRE": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_GRE": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "IP_AUTH": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_AUTH": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "IP_L2TP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_L2TP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                },
            ),
        ],
        is_leaf=True,
        yang_name="protocol",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="oc-pkt-match-types:ip-protocol-type",
        is_config=False,
    )
def _get_hop_limit(self):
"""
Getter method for hop_limit, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/hop_limit (uint8)
YANG Description: The IP packet's hop limit -- known as TTL (in hops) in
IPv4 packets, and hop limit in IPv6
"""
return self.__hop_limit
def _set_hop_limit(self, v, load=False):
    """
    Setter method for hop_limit, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/hop_limit (uint8)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_hop_limit is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_hop_limit() directly.

    YANG Description: The IP packet's hop limit -- known as TTL (in hops) in
    IPv4 packets, and hop limit in IPv6
    """
    # Unwrap pyangbind's unified-type wrapper (if present) so the raw value
    # is what gets validated below.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Re-wrap the value in a fresh YANGDynClass; this validates the
        # uint8 range restriction and attaches path/registration metadata.
        t = YANGDynClass(
            v,
            base=RestrictedClassType(
                base_type=RestrictedClassType(
                    base_type=int,
                    restriction_dict={"range": ["0..255"]},
                    int_size=8,
                ),
                restriction_dict={"range": ["0..255"]},
            ),
            is_leaf=True,
            yang_name="hop-limit",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="uint8",
            is_config=False,
        )
    except (TypeError, ValueError):
        # Standard pyangbind error payload describing the expected type.
        raise ValueError(
            {
                "error-string": """hop_limit must be of a type compatible with uint8""",
                "defined-type": "uint8",
                "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['0..255']}), is_leaf=True, yang_name="hop-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint8', is_config=False)""",
            }
        )

    self.__hop_limit = t
    # Notify any registered change hook after the assignment succeeds.
    if hasattr(self, "_set"):
        self._set()
def _unset_hop_limit(self):
    # Restore 'hop_limit' to a fresh, unset uint8 leaf instance.
    self.__hop_limit = YANGDynClass(
        base=RestrictedClassType(
            base_type=RestrictedClassType(
                base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
            ),
            restriction_dict={"range": ["0..255"]},
        ),
        is_leaf=True,
        yang_name="hop-limit",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="uint8",
        is_config=False,
    )
# Read-only properties: these leaves are config false, so only getters are
# exposed (no setter is bound on the property objects).
ip_version = __builtin__.property(_get_ip_version)
source_ip_address = __builtin__.property(_get_source_ip_address)
source_ip_flow_label = __builtin__.property(_get_source_ip_flow_label)
destination_ip_address = __builtin__.property(_get_destination_ip_address)
destination_ip_flow_label = __builtin__.property(_get_destination_ip_flow_label)
dscp = __builtin__.property(_get_dscp)
protocol = __builtin__.property(_get_protocol)
hop_limit = __builtin__.property(_get_hop_limit)

# Ordered registry of child elements; pyangbind iterates this mapping when
# serialising and copying the container, so the order is significant.
_pyangbind_elements = OrderedDict(
    [
        ("ip_version", ip_version),
        ("source_ip_address", source_ip_address),
        ("source_ip_flow_label", source_ip_flow_label),
        ("destination_ip_address", destination_ip_address),
        ("destination_ip_flow_label", destination_ip_flow_label),
        ("dscp", dscp),
        ("protocol", protocol),
        ("hop_limit", hop_limit),
    ]
)
class state(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/policy-forwarding/policies/policy/rules/rule/ip/state. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.

    YANG Description: State information
    """

    # __slots__ keeps per-instance memory down for the (potentially many)
    # container instances; the double-underscore names are class-mangled
    # storage for each YANG leaf.
    __slots__ = (
        "_path_helper",
        "_extmethods",
        "__ip_version",
        "__source_ip_address",
        "__source_ip_flow_label",
        "__destination_ip_address",
        "__destination_ip_flow_label",
        "__dscp",
        "__protocol",
        "__hop_limit",
    )

    # Metadata consumed by pyangbind's runtime helpers.
    _yang_name = "state"
    _pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
    self._path_helper = False
    self._extmethods = False
    # ip-version: enumeration {unknown(0), ipv4(1), ipv6(2)}.
    self.__ip_version = YANGDynClass(
        base=RestrictedClassType(
            base_type=six.text_type,
            restriction_type="dict_key",
            restriction_arg={
                "unknown": {"value": 0}, "ipv4": {"value": 1}, "ipv6": {"value": 2}
            },
        ),
        is_leaf=True,
        yang_name="ip-version",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="inet:ip-version",
        is_config=False,
    )
    # source-ip-address: inet:ip-prefix union of IPv4 and IPv6 prefix patterns.
    self.__source_ip_address = YANGDynClass(
        base=[
            RestrictedClassType(
                base_type=six.text_type,
                restriction_dict={
                    "pattern": "(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])/(([0-9])|([1-2][0-9])|(3[0-2]))"
                },
            ),
            RestrictedClassType(
                base_type=six.text_type,
                restriction_dict={
                    "pattern": "((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(/(([0-9])|([0-9]{2})|(1[0-1][0-9])|(12[0-8])))"
                },
            ),
        ],
        is_leaf=True,
        yang_name="source-ip-address",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="inet:ip-prefix",
        is_config=False,
    )
    # source-ip-flow-label: uint32 restricted to the 20-bit flow-label range.
    self.__source_ip_flow_label = YANGDynClass(
        base=RestrictedClassType(
            base_type=RestrictedClassType(
                base_type=long,
                restriction_dict={"range": ["0..4294967295"]},
                int_size=32,
            ),
            restriction_dict={"range": ["0..1048575"]},
        ),
        is_leaf=True,
        yang_name="source-ip-flow-label",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="inet:ipv6-flow-label",
        is_config=False,
    )
    # destination-ip-address: inet:ip-prefix (same union as source).
    self.__destination_ip_address = YANGDynClass(
        base=[
            RestrictedClassType(
                base_type=six.text_type,
                restriction_dict={
                    "pattern": "(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])/(([0-9])|([1-2][0-9])|(3[0-2]))"
                },
            ),
            RestrictedClassType(
                base_type=six.text_type,
                restriction_dict={
                    "pattern": "((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(/(([0-9])|([0-9]{2})|(1[0-1][0-9])|(12[0-8])))"
                },
            ),
        ],
        is_leaf=True,
        yang_name="destination-ip-address",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="inet:ip-prefix",
        is_config=False,
    )
    # destination-ip-flow-label: same 20-bit flow-label restriction.
    self.__destination_ip_flow_label = YANGDynClass(
        base=RestrictedClassType(
            base_type=RestrictedClassType(
                base_type=long,
                restriction_dict={"range": ["0..4294967295"]},
                int_size=32,
            ),
            restriction_dict={"range": ["0..1048575"]},
        ),
        is_leaf=True,
        yang_name="destination-ip-flow-label",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="inet:ipv6-flow-label",
        is_config=False,
    )
    # dscp: uint8 restricted to the 6-bit DSCP range 0..63.
    self.__dscp = YANGDynClass(
        base=RestrictedClassType(
            base_type=RestrictedClassType(
                base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
            ),
            restriction_dict={"range": ["0..63"]},
        ),
        is_leaf=True,
        yang_name="dscp",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="inet:dscp",
        is_config=False,
    )
    # protocol: union of uint8 (0..254) and ip-protocol-type identityref;
    # each identity is accepted unprefixed and with its module prefix.
    self.__protocol = YANGDynClass(
        base=[
            RestrictedClassType(
                base_type=RestrictedClassType(
                    base_type=int,
                    restriction_dict={"range": ["0..255"]},
                    int_size=8,
                ),
                restriction_dict={"range": ["0..254"]},
            ),
            RestrictedClassType(
                base_type=six.text_type,
                restriction_type="dict_key",
                restriction_arg={
                    "IP_TCP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_TCP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "IP_UDP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_UDP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "IP_ICMP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_ICMP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "IP_IGMP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_IGMP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "IP_PIM": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_PIM": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "IP_RSVP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_RSVP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "IP_GRE": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_GRE": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "IP_AUTH": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_AUTH": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "IP_L2TP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_L2TP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                },
            ),
        ],
        is_leaf=True,
        yang_name="protocol",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="oc-pkt-match-types:ip-protocol-type",
        is_config=False,
    )
    # hop-limit: plain uint8.
    self.__hop_limit = YANGDynClass(
        base=RestrictedClassType(
            base_type=RestrictedClassType(
                base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
            ),
            restriction_dict={"range": ["0..255"]},
        ),
        is_leaf=True,
        yang_name="hop-limit",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="uint8",
        is_config=False,
    )

    load = kwargs.pop("load", None)
    # Copy-constructor path: a single positional argument is treated as a
    # source object whose matching elements are copied into this container.
    if args:
        if len(args) > 1:
            raise TypeError("cannot create a YANG container with >1 argument")
        all_attr = True
        for e in self._pyangbind_elements:
            if not hasattr(args[0], e):
                all_attr = False
                break
        if not all_attr:
            raise ValueError("Supplied object did not have the correct attributes")
        for e in self._pyangbind_elements:
            nobj = getattr(args[0], e)
            # Skip elements the source object never changed from default.
            if nobj._changed() is False:
                continue
            setmethod = getattr(self, "_set_%s" % e)
            if load is None:
                setmethod(getattr(args[0], e))
            else:
                setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"policy-forwarding",
"policies",
"policy",
"rules",
"rule",
"ip",
"state",
]
def _get_ip_version(self):
"""
Getter method for ip_version, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/ip_version (inet:ip-version)
YANG Description: IP version of the header.
"""
return self.__ip_version
def _set_ip_version(self, v, load=False):
    """
    Setter method for ip_version, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/ip_version (inet:ip-version)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_ip_version is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_ip_version() directly.

    YANG Description: IP version of the header.
    """
    # Unwrap pyangbind's unified-type wrapper, if present.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate against the ip-version enumeration and rebuild the leaf.
        t = YANGDynClass(
            v,
            base=RestrictedClassType(
                base_type=six.text_type,
                restriction_type="dict_key",
                restriction_arg={
                    "unknown": {"value": 0},
                    "ipv4": {"value": 1},
                    "ipv6": {"value": 2},
                },
            ),
            is_leaf=True,
            yang_name="ip-version",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="inet:ip-version",
            is_config=False,
        )
    except (TypeError, ValueError):
        # Standard pyangbind error payload describing the expected type.
        raise ValueError(
            {
                "error-string": """ip_version must be of a type compatible with inet:ip-version""",
                "defined-type": "inet:ip-version",
                "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'unknown': {'value': 0}, 'ipv4': {'value': 1}, 'ipv6': {'value': 2}},), is_leaf=True, yang_name="ip-version", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='inet:ip-version', is_config=False)""",
            }
        )

    self.__ip_version = t
    # Notify any registered change hook after assignment.
    if hasattr(self, "_set"):
        self._set()
def _unset_ip_version(self):
    # Restore 'ip_version' to a fresh, unset enumeration leaf instance.
    self.__ip_version = YANGDynClass(
        base=RestrictedClassType(
            base_type=six.text_type,
            restriction_type="dict_key",
            restriction_arg={
                "unknown": {"value": 0}, "ipv4": {"value": 1}, "ipv6": {"value": 2}
            },
        ),
        is_leaf=True,
        yang_name="ip-version",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="inet:ip-version",
        is_config=False,
    )
def _get_source_ip_address(self):
"""
Getter method for source_ip_address, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/source_ip_address (inet:ip-prefix)
YANG Description: Destination IP address prefix.
"""
return self.__source_ip_address
def _set_source_ip_address(self, v, load=False):
    """
    Setter method for source_ip_address, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/source_ip_address (inet:ip-prefix)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_source_ip_address is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_source_ip_address() directly.

    YANG Description: Destination IP address prefix.
    (NOTE(review): description says "Destination" for this source leaf —
    apparently an upstream YANG description typo.)
    """
    # Unwrap pyangbind's unified-type wrapper, if present.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate against the inet:ip-prefix union (IPv4 or IPv6 prefix).
        t = YANGDynClass(
            v,
            base=[
                RestrictedClassType(
                    base_type=six.text_type,
                    restriction_dict={
                        "pattern": "(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])/(([0-9])|([1-2][0-9])|(3[0-2]))"
                    },
                ),
                RestrictedClassType(
                    base_type=six.text_type,
                    restriction_dict={
                        "pattern": "((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(/(([0-9])|([0-9]{2})|(1[0-1][0-9])|(12[0-8])))"
                    },
                ),
            ],
            is_leaf=True,
            yang_name="source-ip-address",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="inet:ip-prefix",
            is_config=False,
        )
    except (TypeError, ValueError):
        # Standard pyangbind error payload describing the expected type.
        raise ValueError(
            {
                "error-string": """source_ip_address must be of a type compatible with inet:ip-prefix""",
                "defined-type": "inet:ip-prefix",
                "generated-type": """YANGDynClass(base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])/(([0-9])|([1-2][0-9])|(3[0-2]))'}),RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(/(([0-9])|([0-9]{2})|(1[0-1][0-9])|(12[0-8])))'}),], is_leaf=True, yang_name="source-ip-address", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='inet:ip-prefix', is_config=False)""",
            }
        )

    self.__source_ip_address = t
    # Notify any registered change hook after assignment.
    if hasattr(self, "_set"):
        self._set()
def _unset_source_ip_address(self):
    # Restore 'source_ip_address' to a fresh, unset ip-prefix leaf instance.
    self.__source_ip_address = YANGDynClass(
        base=[
            RestrictedClassType(
                base_type=six.text_type,
                restriction_dict={
                    "pattern": "(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])/(([0-9])|([1-2][0-9])|(3[0-2]))"
                },
            ),
            RestrictedClassType(
                base_type=six.text_type,
                restriction_dict={
                    "pattern": "((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(/(([0-9])|([0-9]{2})|(1[0-1][0-9])|(12[0-8])))"
                },
            ),
        ],
        is_leaf=True,
        yang_name="source-ip-address",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="inet:ip-prefix",
        is_config=False,
    )
def _get_source_ip_flow_label(self):
"""
Getter method for source_ip_flow_label, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/source_ip_flow_label (inet:ipv6-flow-label)
YANG Description: Source IPv6 Flow label.
"""
return self.__source_ip_flow_label
def _set_source_ip_flow_label(self, v, load=False):
    """
    Setter method for source_ip_flow_label, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/source_ip_flow_label (inet:ipv6-flow-label)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_source_ip_flow_label is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_source_ip_flow_label() directly.

    YANG Description: Source IPv6 Flow label.
    """
    # Unwrap pyangbind's unified-type wrapper, if present.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate against the 20-bit ipv6-flow-label range (0..1048575).
        t = YANGDynClass(
            v,
            base=RestrictedClassType(
                base_type=RestrictedClassType(
                    base_type=long,
                    restriction_dict={"range": ["0..4294967295"]},
                    int_size=32,
                ),
                restriction_dict={"range": ["0..1048575"]},
            ),
            is_leaf=True,
            yang_name="source-ip-flow-label",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="inet:ipv6-flow-label",
            is_config=False,
        )
    except (TypeError, ValueError):
        # Standard pyangbind error payload describing the expected type.
        raise ValueError(
            {
                "error-string": """source_ip_flow_label must be of a type compatible with inet:ipv6-flow-label""",
                "defined-type": "inet:ipv6-flow-label",
                "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': ['0..1048575']}), is_leaf=True, yang_name="source-ip-flow-label", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='inet:ipv6-flow-label', is_config=False)""",
            }
        )

    self.__source_ip_flow_label = t
    # Notify any registered change hook after assignment.
    if hasattr(self, "_set"):
        self._set()
def _unset_source_ip_flow_label(self):
    # Restore 'source_ip_flow_label' to a fresh, unset flow-label leaf.
    self.__source_ip_flow_label = YANGDynClass(
        base=RestrictedClassType(
            base_type=RestrictedClassType(
                base_type=long,
                restriction_dict={"range": ["0..4294967295"]},
                int_size=32,
            ),
            restriction_dict={"range": ["0..1048575"]},
        ),
        is_leaf=True,
        yang_name="source-ip-flow-label",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="inet:ipv6-flow-label",
        is_config=False,
    )
def _get_destination_ip_address(self):
"""
Getter method for destination_ip_address, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/destination_ip_address (inet:ip-prefix)
YANG Description: Destination IP address prefix.
"""
return self.__destination_ip_address
def _set_destination_ip_address(self, v, load=False):
    """
    Setter method for destination_ip_address, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/destination_ip_address (inet:ip-prefix)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_destination_ip_address is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_destination_ip_address() directly.

    YANG Description: Destination IP address prefix.
    """
    # Unwrap pyangbind's unified-type wrapper, if present.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate against the inet:ip-prefix union (IPv4 or IPv6 prefix).
        t = YANGDynClass(
            v,
            base=[
                RestrictedClassType(
                    base_type=six.text_type,
                    restriction_dict={
                        "pattern": "(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])/(([0-9])|([1-2][0-9])|(3[0-2]))"
                    },
                ),
                RestrictedClassType(
                    base_type=six.text_type,
                    restriction_dict={
                        "pattern": "((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(/(([0-9])|([0-9]{2})|(1[0-1][0-9])|(12[0-8])))"
                    },
                ),
            ],
            is_leaf=True,
            yang_name="destination-ip-address",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="inet:ip-prefix",
            is_config=False,
        )
    except (TypeError, ValueError):
        # Standard pyangbind error payload describing the expected type.
        raise ValueError(
            {
                "error-string": """destination_ip_address must be of a type compatible with inet:ip-prefix""",
                "defined-type": "inet:ip-prefix",
                "generated-type": """YANGDynClass(base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])/(([0-9])|([1-2][0-9])|(3[0-2]))'}),RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(/(([0-9])|([0-9]{2})|(1[0-1][0-9])|(12[0-8])))'}),], is_leaf=True, yang_name="destination-ip-address", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='inet:ip-prefix', is_config=False)""",
            }
        )

    self.__destination_ip_address = t
    # Notify any registered change hook after assignment.
    if hasattr(self, "_set"):
        self._set()
def _unset_destination_ip_address(self):
    # Restore 'destination_ip_address' to a fresh, unset ip-prefix leaf.
    self.__destination_ip_address = YANGDynClass(
        base=[
            RestrictedClassType(
                base_type=six.text_type,
                restriction_dict={
                    "pattern": "(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])/(([0-9])|([1-2][0-9])|(3[0-2]))"
                },
            ),
            RestrictedClassType(
                base_type=six.text_type,
                restriction_dict={
                    "pattern": "((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(/(([0-9])|([0-9]{2})|(1[0-1][0-9])|(12[0-8])))"
                },
            ),
        ],
        is_leaf=True,
        yang_name="destination-ip-address",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="inet:ip-prefix",
        is_config=False,
    )
def _get_destination_ip_flow_label(self):
"""
Getter method for destination_ip_flow_label, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/destination_ip_flow_label (inet:ipv6-flow-label)
YANG Description: Destination IPv6 Flow label.
"""
return self.__destination_ip_flow_label
def _set_destination_ip_flow_label(self, v, load=False):
    """
    Setter method for destination_ip_flow_label, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/destination_ip_flow_label (inet:ipv6-flow-label)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_destination_ip_flow_label is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_destination_ip_flow_label() directly.

    YANG Description: Destination IPv6 Flow label.
    """
    # Unwrap pyangbind's unified-type wrapper, if present.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate against the 20-bit ipv6-flow-label range (0..1048575).
        t = YANGDynClass(
            v,
            base=RestrictedClassType(
                base_type=RestrictedClassType(
                    base_type=long,
                    restriction_dict={"range": ["0..4294967295"]},
                    int_size=32,
                ),
                restriction_dict={"range": ["0..1048575"]},
            ),
            is_leaf=True,
            yang_name="destination-ip-flow-label",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="inet:ipv6-flow-label",
            is_config=False,
        )
    except (TypeError, ValueError):
        # Standard pyangbind error payload describing the expected type.
        raise ValueError(
            {
                "error-string": """destination_ip_flow_label must be of a type compatible with inet:ipv6-flow-label""",
                "defined-type": "inet:ipv6-flow-label",
                "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': ['0..1048575']}), is_leaf=True, yang_name="destination-ip-flow-label", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='inet:ipv6-flow-label', is_config=False)""",
            }
        )

    self.__destination_ip_flow_label = t
    # Notify any registered change hook after assignment.
    if hasattr(self, "_set"):
        self._set()
def _unset_destination_ip_flow_label(self):
    # Restore 'destination_ip_flow_label' to a fresh, unset flow-label leaf.
    self.__destination_ip_flow_label = YANGDynClass(
        base=RestrictedClassType(
            base_type=RestrictedClassType(
                base_type=long,
                restriction_dict={"range": ["0..4294967295"]},
                int_size=32,
            ),
            restriction_dict={"range": ["0..1048575"]},
        ),
        is_leaf=True,
        yang_name="destination-ip-flow-label",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="inet:ipv6-flow-label",
        is_config=False,
    )
def _get_dscp(self):
"""
Getter method for dscp, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/dscp (inet:dscp)
YANG Description: Value of diffserv codepoint.
"""
return self.__dscp
def _set_dscp(self, v, load=False):
    """
    Setter method for dscp, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/dscp (inet:dscp)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_dscp is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_dscp() directly.

    YANG Description: Value of diffserv codepoint.
    """
    # Unwrap pyangbind's unified-type wrapper, if present.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate against the 6-bit DSCP range (0..63).
        t = YANGDynClass(
            v,
            base=RestrictedClassType(
                base_type=RestrictedClassType(
                    base_type=int,
                    restriction_dict={"range": ["0..255"]},
                    int_size=8,
                ),
                restriction_dict={"range": ["0..63"]},
            ),
            is_leaf=True,
            yang_name="dscp",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="inet:dscp",
            is_config=False,
        )
    except (TypeError, ValueError):
        # Standard pyangbind error payload describing the expected type.
        raise ValueError(
            {
                "error-string": """dscp must be of a type compatible with inet:dscp""",
                "defined-type": "inet:dscp",
                "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['0..63']}), is_leaf=True, yang_name="dscp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='inet:dscp', is_config=False)""",
            }
        )

    self.__dscp = t
    # Notify any registered change hook after assignment.
    if hasattr(self, "_set"):
        self._set()
def _unset_dscp(self):
    # Reset the dscp leaf to a fresh, value-less YANG-typed instance
    # (same type construction as in _set_dscp).
    self.__dscp = YANGDynClass(
        base=RestrictedClassType(
            base_type=RestrictedClassType(
                base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
            ),
            restriction_dict={"range": ["0..63"]},
        ),
        is_leaf=True,
        yang_name="dscp",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="inet:dscp",
        is_config=False,
    )
def _get_protocol(self):
"""
Getter method for protocol, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/protocol (oc-pkt-match-types:ip-protocol-type)
YANG Description: Internet Protocol number.
"""
return self.__protocol
def _set_protocol(self, v, load=False):
    """
    Setter method for protocol, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/protocol (oc-pkt-match-types:ip-protocol-type)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_protocol is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_protocol() directly.

    YANG Description: Internet Protocol number.
    """
    # If the incoming value is already a pyangbind-typed object, unwrap it
    # to its underlying type so it is re-validated below.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # ip-protocol-type is a union: either a numeric protocol value
        # (uint8 restricted to "0..254") or one of the named IP_* identities
        # from openconfig-packet-match-types (with or without module prefix).
        t = YANGDynClass(
            v,
            base=[
                RestrictedClassType(
                    base_type=RestrictedClassType(
                        base_type=int,
                        restriction_dict={"range": ["0..255"]},
                        int_size=8,
                    ),
                    restriction_dict={"range": ["0..254"]},
                ),
                RestrictedClassType(
                    base_type=six.text_type,
                    restriction_type="dict_key",
                    restriction_arg={
                        "IP_TCP": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "oc-pkt-match-types:IP_TCP": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "IP_UDP": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "oc-pkt-match-types:IP_UDP": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "IP_ICMP": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "oc-pkt-match-types:IP_ICMP": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "IP_IGMP": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "oc-pkt-match-types:IP_IGMP": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "IP_PIM": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "oc-pkt-match-types:IP_PIM": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "IP_RSVP": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "oc-pkt-match-types:IP_RSVP": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "IP_GRE": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "oc-pkt-match-types:IP_GRE": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "IP_AUTH": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "oc-pkt-match-types:IP_AUTH": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "IP_L2TP": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                        "oc-pkt-match-types:IP_L2TP": {
                            "@module": "openconfig-packet-match-types",
                            "@namespace": "http://openconfig.net/yang/packet-match-types",
                        },
                    },
                ),
            ],
            is_leaf=True,
            yang_name="protocol",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="oc-pkt-match-types:ip-protocol-type",
            is_config=False,
        )
    except (TypeError, ValueError):
        # Normalise any coercion failure into a structured ValueError that
        # describes the expected YANG type to the caller.
        raise ValueError(
            {
                "error-string": """protocol must be of a type compatible with oc-pkt-match-types:ip-protocol-type""",
                "defined-type": "oc-pkt-match-types:ip-protocol-type",
                "generated-type": """YANGDynClass(base=[RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['0..254']}),RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'IP_TCP': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'oc-pkt-match-types:IP_TCP': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'IP_UDP': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'oc-pkt-match-types:IP_UDP': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'IP_ICMP': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'oc-pkt-match-types:IP_ICMP': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'IP_IGMP': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'oc-pkt-match-types:IP_IGMP': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'IP_PIM': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'oc-pkt-match-types:IP_PIM': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'IP_RSVP': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'oc-pkt-match-types:IP_RSVP': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'IP_GRE': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'oc-pkt-match-types:IP_GRE': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'IP_AUTH': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'oc-pkt-match-types:IP_AUTH': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'IP_L2TP': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}, 'oc-pkt-match-types:IP_L2TP': {'@module': 'openconfig-packet-match-types', '@namespace': 'http://openconfig.net/yang/packet-match-types'}},),], is_leaf=True, yang_name="protocol", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-pkt-match-types:ip-protocol-type', is_config=False)""",
            }
        )
    # Store the validated leaf and notify the parent hierarchy, if supported.
    self.__protocol = t
    if hasattr(self, "_set"):
        self._set()
def _unset_protocol(self):
    # Reset the protocol leaf to a fresh, value-less YANG-typed instance
    # (same union type construction as in _set_protocol: numeric 0..254
    # or a named IP_* identity).
    self.__protocol = YANGDynClass(
        base=[
            RestrictedClassType(
                base_type=RestrictedClassType(
                    base_type=int,
                    restriction_dict={"range": ["0..255"]},
                    int_size=8,
                ),
                restriction_dict={"range": ["0..254"]},
            ),
            RestrictedClassType(
                base_type=six.text_type,
                restriction_type="dict_key",
                restriction_arg={
                    "IP_TCP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_TCP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "IP_UDP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_UDP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "IP_ICMP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_ICMP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "IP_IGMP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_IGMP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "IP_PIM": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_PIM": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "IP_RSVP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_RSVP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "IP_GRE": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_GRE": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "IP_AUTH": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_AUTH": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "IP_L2TP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                    "oc-pkt-match-types:IP_L2TP": {
                        "@module": "openconfig-packet-match-types",
                        "@namespace": "http://openconfig.net/yang/packet-match-types",
                    },
                },
            ),
        ],
        is_leaf=True,
        yang_name="protocol",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="oc-pkt-match-types:ip-protocol-type",
        is_config=False,
    )
def _get_hop_limit(self):
"""
Getter method for hop_limit, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/hop_limit (uint8)
YANG Description: The IP packet's hop limit -- known as TTL (in hops) in
IPv4 packets, and hop limit in IPv6
"""
return self.__hop_limit
def _set_hop_limit(self, v, load=False):
    """
    Setter method for hop_limit, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip/state/hop_limit (uint8)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_hop_limit is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_hop_limit() directly.

    YANG Description: The IP packet's hop limit -- known as TTL (in hops) in
    IPv4 packets, and hop limit in IPv6
    """
    # If the incoming value is already a pyangbind-typed object, unwrap it
    # to its underlying type so it is re-validated below.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Plain uint8 leaf: value must fall in 0..255.
        t = YANGDynClass(
            v,
            base=RestrictedClassType(
                base_type=RestrictedClassType(
                    base_type=int,
                    restriction_dict={"range": ["0..255"]},
                    int_size=8,
                ),
                restriction_dict={"range": ["0..255"]},
            ),
            is_leaf=True,
            yang_name="hop-limit",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="uint8",
            is_config=False,
        )
    except (TypeError, ValueError):
        # Normalise any coercion failure into a structured ValueError that
        # describes the expected YANG type to the caller.
        raise ValueError(
            {
                "error-string": """hop_limit must be of a type compatible with uint8""",
                "defined-type": "uint8",
                "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['0..255']}), is_leaf=True, yang_name="hop-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint8', is_config=False)""",
            }
        )
    # Store the validated leaf and notify the parent hierarchy, if supported.
    self.__hop_limit = t
    if hasattr(self, "_set"):
        self._set()
def _unset_hop_limit(self):
    # Reset the hop-limit leaf to a fresh, value-less YANG-typed instance
    # (same uint8 type construction as in _set_hop_limit).
    self.__hop_limit = YANGDynClass(
        base=RestrictedClassType(
            base_type=RestrictedClassType(
                base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
            ),
            restriction_dict={"range": ["0..255"]},
        ),
        is_leaf=True,
        yang_name="hop-limit",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="uint8",
        is_config=False,
    )
# Read-only property bindings: these leaves live under a config-false
# "state" container, so only getters are exposed publicly.
ip_version = __builtin__.property(_get_ip_version)
source_ip_address = __builtin__.property(_get_source_ip_address)
source_ip_flow_label = __builtin__.property(_get_source_ip_flow_label)
destination_ip_address = __builtin__.property(_get_destination_ip_address)
destination_ip_flow_label = __builtin__.property(_get_destination_ip_flow_label)
dscp = __builtin__.property(_get_dscp)
protocol = __builtin__.property(_get_protocol)
hop_limit = __builtin__.property(_get_hop_limit)

# Registry of this container's elements in YANG model order; pyangbind
# uses it for iteration and serialisation.
_pyangbind_elements = OrderedDict(
    [
        ("ip_version", ip_version),
        ("source_ip_address", source_ip_address),
        ("source_ip_flow_label", source_ip_flow_label),
        ("destination_ip_address", destination_ip_address),
        ("destination_ip_flow_label", destination_ip_flow_label),
        ("dscp", dscp),
        ("protocol", protocol),
        ("hop_limit", hop_limit),
    ]
)
| 50.4532
| 2,940
| 0.516347
| 12,162
| 115,891
| 4.733268
| 0.019487
| 0.065316
| 0.080047
| 0.093944
| 0.993486
| 0.989716
| 0.989716
| 0.989716
| 0.989716
| 0.989716
| 0
| 0.032534
| 0.339586
| 115,891
| 2,296
| 2,941
| 50.475174
| 0.719609
| 0.106988
| 0
| 0.863405
| 0
| 0.021407
| 0.379772
| 0.187841
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026504
| false
| 0
| 0.007645
| 0
| 0.057594
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
678aee95d1011c3bec4c3c0b3d9de95e094df2a7
| 888
|
py
|
Python
|
django_exporter/views.py
|
shashisp/django-exporter
|
9f2e7ab2ee51d4dc78f06fd5e5645bb5a6ee75eb
|
[
"MIT"
] | null | null | null |
django_exporter/views.py
|
shashisp/django-exporter
|
9f2e7ab2ee51d4dc78f06fd5e5645bb5a6ee75eb
|
[
"MIT"
] | null | null | null |
django_exporter/views.py
|
shashisp/django-exporter
|
9f2e7ab2ee51d4dc78f06fd5e5645bb5a6ee75eb
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Generic CRUD views for the django-exporter app.

NOTE(review): the original generated file substituted the app *description*
("Reusable django app to export large datasets.") where a model class name
belongs, which is not valid Python.  ``Export`` is used below as a placeholder
model name -- confirm it against the app's actual models module before use.
"""
from django.views.generic import (
    CreateView,
    DeleteView,
    DetailView,
    UpdateView,
    ListView,
)

from .models import (
    Export,
)


class ExportCreateView(CreateView):
    """Create a single Export instance via the default model form."""

    model = Export


class ExportDeleteView(DeleteView):
    """Confirm and delete a single Export instance."""

    model = Export


class ExportDetailView(DetailView):
    """Display a single Export instance."""

    model = Export


class ExportUpdateView(UpdateView):
    """Edit a single Export instance via the default model form."""

    model = Export


class ExportListView(ListView):
    """List all Export instances."""

    model = Export
| 22.769231
| 74
| 0.75
| 113
| 888
| 5.893805
| 0.19469
| 0.231231
| 0.280781
| 0.313814
| 0.702703
| 0.702703
| 0.702703
| 0.702703
| 0.638138
| 0.638138
| 0
| 0.001393
| 0.191441
| 888
| 38
| 75
| 23.368421
| 0.926184
| 0.023649
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.1
| null | null | 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
67b13318b3f5d15c3234eda50ed0df242665ecd4
| 8,656
|
py
|
Python
|
tests/test_create_accounts_free_trial_qtest.py
|
alexko371/Python-Selenium-AofL
|
53a988fdfa06fae95a2f29fa677b46698336ef11
|
[
"Apache-2.0"
] | null | null | null |
tests/test_create_accounts_free_trial_qtest.py
|
alexko371/Python-Selenium-AofL
|
53a988fdfa06fae95a2f29fa677b46698336ef11
|
[
"Apache-2.0"
] | null | null | null |
tests/test_create_accounts_free_trial_qtest.py
|
alexko371/Python-Selenium-AofL
|
53a988fdfa06fae95a2f29fa677b46698336ef11
|
[
"Apache-2.0"
] | null | null | null |
def _submit_payment_and_subscriptions(app, url, email, with_assessment):
    """Drive sign-up through email/password, payment, and the up-sell pages.

    url -- environment-specific subscription page (Update URL QTEST!)
    email -- sign-up email (Change Email Credentials!)
    with_assessment -- accept the free-trial assessment offer instead of
        declining it ("No thanks" path asserts the thank-you text).
    """
    app.sign_up.open_abcmouse_web_page(url)
    app.sign_up.submit_and_confirm_email(email)
    app.sign_up.submit_and_confirm_password("test123")
    app.payment_info.submit_cc_qtest(cardholder="Test Alex Ko", code_cvv="1234", zip_code="12345")
    # [NEW] Check here to agree that, after your free trial month, you will be charged $9.95.
    # app.credit_info.check_to_agree()
    app.sign_up.click_submit_button()
    # Annual Subscription Page
    app.sign_up.select_no_thanks_annual_subscription()
    app.sign_up.close_annual_subscription_pop_up()
    # Assessment Subscription Page
    if with_assessment:
        app.sign_up.select_free_trial_assessment_subscription()
        app.sign_up.click_next_button_in_free_trial_congrats_assessment_pop_up()
    else:
        app.sign_up.select_no_thanks_free_trial_assessment_subscription()
        assert app.sign_up.thank_you_text()
    app.sign_up.click_submit_continue_registration_button()


def _onboard_children(app, children):
    """Complete the on-boarding flow for *children* and land on the SHP.

    children -- sequence of (name, gender, academic_level, avatar, pet_index)
        tuples; grade level IDs: [toddler-time] [preschool] [pre-k]
        [kindergarten] [first-grade] [second-grade].  The intro video is
        skipped after the first child, matching the original flow.
    """
    app.onboarding.welcome_page_click_get_started_button()
    app.onboarding.survey_click_continue_buttons()
    app.onboarding.input_parent_name(parent_first_name="AK Test", parent_family_name="Ko Test")
    for index, (name, gender, level, avatar, pet) in enumerate(children):
        if index:
            app.onboarding.add_child()
        app.onboarding.create_child_profile(child_name=name, gender=gender, academic_level=level)
        app.onboarding.choose_avatar(avatar=avatar)
        app.onboarding.choose_hamster_and_fish(
            hamster="hamster-%d" % pet, fish="fish-%d" % pet, hamster_name="My Ham-StAr"
        )
        if index == 0:
            app.onboarding.skip_video()
    app.onboarding.go_to_shp()
    # assert app.onboarding.mouse_pop_up()


def test_create_free_trial_girl_and_boy(app):
    _submit_payment_and_subscriptions(
        app, "https://dk.qtest.abcmouse.com/abt/subscription", "ipad@dk.test", with_assessment=False
    )
    _onboard_children(app, [
        ("Nancy", "F", "toddler-time", "girl_avatar03", 1),
        ("Nick", "M", "kindergarten", "boy_avatar07", 2),
    ])


def test_create_free_trial_boy_and_girl(app):
    _submit_payment_and_subscriptions(
        app, "https://dk.qtest.abcmouse.com/abt/subscription", "android1@dk.test", with_assessment=False
    )
    _onboard_children(app, [
        ("Tom", "M", "kindergarten", "boy_avatar05", 3),
        ("Natalee", "F", "first-grade", "girl_avatar10", 4),
    ])


# FREE TRIAL WITH ASSESSMENT
def test_create_free_trial_with_assessment_girl_and_boy(app):
    _submit_payment_and_subscriptions(
        app, "https://fp.qtest.abcmouse.com/abt/subscription", "ak2@fp.test", with_assessment=True
    )
    _onboard_children(app, [
        ("Marta (do1)", "F", "preschool", "girl_avatar01", 3),
        ("Kyle", "M", "kindergarten", "boy_avatar02", 4),
    ])


def test_create_free_trial_with_assessment_boy_and_girl(app):
    _submit_payment_and_subscriptions(
        app, "https://bm.qtest.abcmouse.com/abt/subscription", "camp3@bm.test", with_assessment=True
    )
    _onboard_children(app, [
        ("Ben", "M", "kindergarten", "boy_avatar08", 1),
        ("Gloria", "F", "first-grade", "girl_avatar11", 2),
    ])
| 60.957746
| 109
| 0.750347
| 1,212
| 8,656
| 5.023927
| 0.121287
| 0.11102
| 0.053211
| 0.022992
| 0.96469
| 0.961077
| 0.949581
| 0.94137
| 0.94137
| 0.94137
| 0
| 0.016704
| 0.114718
| 8,656
| 141
| 110
| 61.390071
| 0.777894
| 0.252657
| 0
| 0.717391
| 0
| 0
| 0.136335
| 0
| 0
| 0
| 0
| 0
| 0.021739
| 1
| 0.043478
| false
| 0.043478
| 0
| 0
| 0.043478
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
67c1bd9258843a3a75955985045bf7fa451f74af
| 3,024
|
py
|
Python
|
tests/test_fio_bounds.py
|
jGaboardi/Fiona
|
d36e0c897c545e0e51fe759e540c85c117bf3fc1
|
[
"BSD-3-Clause"
] | 778
|
2015-01-03T18:29:34.000Z
|
2022-03-31T03:17:53.000Z
|
tests/test_fio_bounds.py
|
jGaboardi/Fiona
|
d36e0c897c545e0e51fe759e540c85c117bf3fc1
|
[
"BSD-3-Clause"
] | 848
|
2015-01-07T17:16:10.000Z
|
2022-03-27T23:14:58.000Z
|
tests/test_fio_bounds.py
|
jGaboardi/Fiona
|
d36e0c897c545e0e51fe759e540c85c117bf3fc1
|
[
"BSD-3-Clause"
] | 195
|
2015-01-29T21:48:37.000Z
|
2022-03-25T15:18:24.000Z
|
"""Tests for `$ fio bounds`."""
import re
from fiona.fio import bounds
from fiona.fio.main import main_group
def test_fail(runner):
    """Non-GeoJSON input makes `fio bounds` exit with status 1."""
    res = runner.invoke(main_group, ['bounds', ], '5')
    assert res.exit_code == 1
def test_seq(feature_seq, runner):
    """A two-feature sequence yields two bbox arrays (8 floats total)."""
    res = runner.invoke(main_group, ['bounds', ], feature_seq)
    assert res.exit_code == 0
    assert res.output.count('[') == 2
    assert res.output.count(']') == 2
    assert len(re.findall(r'\d*\.\d*', res.output)) == 8
def test_seq_rs(feature_seq_pp_rs, runner):
    """RS-delimited pretty-printed input is handled like a plain sequence."""
    res = runner.invoke(main_group, ['bounds', ], feature_seq_pp_rs)
    assert res.exit_code == 0
    assert res.output.count('[') == 2
    assert res.output.count(']') == 2
    assert len(re.findall(r'\d*\.\d*', res.output)) == 8
def test_precision(feature_seq, runner):
    """--precision 1 rounds every coordinate to one decimal place."""
    res = runner.invoke(main_group, ['bounds', '--precision', 1], feature_seq)
    assert res.exit_code == 0
    assert res.output.count('[') == 2
    assert res.output.count(']') == 2
    # Exactly one digit after the point, followed by a non-digit.
    assert len(re.findall(r'\d*\.\d{1}\D', res.output)) == 8
def test_explode(feature_collection, runner):
    """--explode emits one bbox per feature of a collection."""
    res = runner.invoke(main_group, ['bounds', '--explode'], feature_collection)
    assert res.exit_code == 0
    assert res.output.count('[') == 2
    assert res.output.count(']') == 2
    assert len(re.findall(r'\d*\.\d*', res.output)) == 8
def test_explode_pp(feature_collection_pp, runner):
    """--explode also works on a pretty-printed collection."""
    res = runner.invoke(main_group, ['bounds', '--explode'], feature_collection_pp)
    assert res.exit_code == 0
    assert res.output.count('[') == 2
    assert res.output.count(']') == 2
    assert len(re.findall(r'\d*\.\d*', res.output)) == 8
def test_with_id(feature_seq, runner):
    """--with-id wraps each bbox in an object carrying the feature id."""
    res = runner.invoke(main_group, ['bounds', '--with-id'], feature_seq)
    assert res.exit_code == 0
    assert res.output.count('id') == 2
    assert res.output.count('bbox') == 2
def test_explode_with_id(feature_collection, runner):
    """--explode combined with --with-id keeps ids on exploded features."""
    res = runner.invoke(main_group, ['bounds', '--explode', '--with-id'], feature_collection)
    assert res.exit_code == 0
    assert res.output.count('id') == 2
    assert res.output.count('bbox') == 2
def test_with_obj(feature_seq, runner):
    """--with-obj returns the full feature object annotated with its bbox."""
    res = runner.invoke(main_group, ['bounds', '--with-obj'], feature_seq)
    assert res.exit_code == 0
    assert res.output.count('geometry') == 2
    assert res.output.count('bbox') == 2
def test_bounds_explode_with_obj(feature_collection, runner):
    """--explode with --with-obj annotates each exploded feature object."""
    res = runner.invoke(main_group, ['bounds', '--explode', '--with-obj'], feature_collection)
    assert res.exit_code == 0
    assert res.output.count('geometry') == 2
    assert res.output.count('bbox') == 2
def test_explode_output_rs(feature_collection, runner):
    """--rs prefixes each exploded record with an RS (0x1E) delimiter."""
    res = runner.invoke(main_group, ['bounds', '--explode', '--rs'], feature_collection)
    assert res.exit_code == 0
    assert res.output.count('\x1e') == 2
    assert res.output.count('[') == 2
    assert res.output.count(']') == 2
    assert len(re.findall(r'\d*\.\d*', res.output)) == 8
| 36.878049
| 91
| 0.66832
| 413
| 3,024
| 4.72155
| 0.104116
| 0.166154
| 0.183077
| 0.135385
| 0.886667
| 0.882564
| 0.870769
| 0.832821
| 0.825641
| 0.750256
| 0
| 0.012451
| 0.150132
| 3,024
| 81
| 92
| 37.333333
| 0.746304
| 0.008267
| 0
| 0.490909
| 0
| 0
| 0.089876
| 0
| 0
| 0
| 0
| 0
| 0.509091
| 1
| 0.2
| false
| 0
| 0.054545
| 0
| 0.254545
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
67e4d6c6c1816724be48a2bf6e4a79a1bcd64db5
| 13,872
|
py
|
Python
|
sdk/python/pulumi_akamai/app_sec_by_pass_network_list.py
|
pulumi/pulumi-akamai
|
85f933ccf2f61738b3074a13fa718132280f8364
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2021-01-21T15:22:12.000Z
|
2021-08-25T14:15:29.000Z
|
sdk/python/pulumi_akamai/app_sec_by_pass_network_list.py
|
pulumi/pulumi-akamai
|
85f933ccf2f61738b3074a13fa718132280f8364
|
[
"ECL-2.0",
"Apache-2.0"
] | 59
|
2020-08-13T14:39:36.000Z
|
2022-03-31T15:19:48.000Z
|
sdk/python/pulumi_akamai/app_sec_by_pass_network_list.py
|
pulumi/pulumi-akamai
|
85f933ccf2f61738b3074a13fa718132280f8364
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['AppSecByPassNetworkListArgs', 'AppSecByPassNetworkList']
@pulumi.input_type
class AppSecByPassNetworkListArgs:
    # Generated by the Pulumi Terraform Bridge (tfgen); edit with care.
    def __init__(__self__, *,
                 bypass_network_lists: pulumi.Input[Sequence[pulumi.Input[str]]],
                 config_id: pulumi.Input[int],
                 security_policy_id: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a AppSecByPassNetworkList resource.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] bypass_network_lists: . JSON array of network IDs that comprise the bypass list.
        :param pulumi.Input[int] config_id: . Unique identifier of the security configuration associated with the network bypass lists being modified.
        :param pulumi.Input[str] security_policy_id: The ID of the security policy governing the bypass network lists
        """
        pulumi.set(__self__, "bypass_network_lists", bypass_network_lists)
        pulumi.set(__self__, "config_id", config_id)
        # security_policy_id is optional and only stored when supplied.
        if security_policy_id is not None:
            pulumi.set(__self__, "security_policy_id", security_policy_id)

    @property
    @pulumi.getter(name="bypassNetworkLists")
    def bypass_network_lists(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
        """
        . JSON array of network IDs that comprise the bypass list.
        """
        return pulumi.get(self, "bypass_network_lists")

    @bypass_network_lists.setter
    def bypass_network_lists(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
        pulumi.set(self, "bypass_network_lists", value)

    @property
    @pulumi.getter(name="configId")
    def config_id(self) -> pulumi.Input[int]:
        """
        . Unique identifier of the security configuration associated with the network bypass lists being modified.
        """
        return pulumi.get(self, "config_id")

    @config_id.setter
    def config_id(self, value: pulumi.Input[int]):
        pulumi.set(self, "config_id", value)

    @property
    @pulumi.getter(name="securityPolicyId")
    def security_policy_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the security policy governing the bypass network lists
        """
        return pulumi.get(self, "security_policy_id")

    @security_policy_id.setter
    def security_policy_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "security_policy_id", value)
@pulumi.input_type
class _AppSecByPassNetworkListState:
    # State bag used by AppSecByPassNetworkList.get() lookups; unlike the
    # Args class, every field here is optional. tfgen-generated.
    def __init__(__self__, *,
                 bypass_network_lists: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 config_id: Optional[pulumi.Input[int]] = None,
                 security_policy_id: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering AppSecByPassNetworkList resources.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] bypass_network_lists: . JSON array of network IDs that comprise the bypass list.
        :param pulumi.Input[int] config_id: . Unique identifier of the security configuration associated with the network bypass lists being modified.
        :param pulumi.Input[str] security_policy_id: The ID of the security policy governing the bypass network lists
        """
        # Only store properties that were actually provided.
        if bypass_network_lists is not None:
            pulumi.set(__self__, "bypass_network_lists", bypass_network_lists)
        if config_id is not None:
            pulumi.set(__self__, "config_id", config_id)
        if security_policy_id is not None:
            pulumi.set(__self__, "security_policy_id", security_policy_id)
    @property
    @pulumi.getter(name="bypassNetworkLists")
    def bypass_network_lists(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        . JSON array of network IDs that comprise the bypass list.
        """
        return pulumi.get(self, "bypass_network_lists")
    @bypass_network_lists.setter
    def bypass_network_lists(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "bypass_network_lists", value)
    @property
    @pulumi.getter(name="configId")
    def config_id(self) -> Optional[pulumi.Input[int]]:
        """
        . Unique identifier of the security configuration associated with the network bypass lists being modified.
        """
        return pulumi.get(self, "config_id")
    @config_id.setter
    def config_id(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "config_id", value)
    @property
    @pulumi.getter(name="securityPolicyId")
    def security_policy_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the security policy governing the bypass network lists
        """
        return pulumi.get(self, "security_policy_id")
    @security_policy_id.setter
    def security_policy_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "security_policy_id", value)
class AppSecByPassNetworkList(pulumi.CustomResource):
    # tfgen-generated resource class; keep hand edits to comments only.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 bypass_network_lists: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 config_id: Optional[pulumi.Input[int]] = None,
                 security_policy_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        **Scopes**: Security configuration
        Specifies the networks that appear on the bypass network list. Networks on this list are allowed to bypass the Web Application Firewall.
        Note that this resource is only applicable to WAP (Web Application Protector) configurations.
        **Related API Endpoint**: [/appsec/v1/configs/{configId}/versions/{versionNumber}/bypass-network-lists](https://developer.akamai.com/api/cloud_security/application_security/v1.html#putbypassnetworklistsforawapconfigversion)
        ## Example Usage
        Basic usage:
        ```python
        import pulumi
        import pulumi_akamai as akamai
        configuration = akamai.get_app_sec_configuration(name="Documentation")
        bypass_network_lists = akamai.AppSecByPassNetworkList("bypassNetworkLists",
            config_id=configuration.config_id,
            bypass_network_lists=[
                "DocumentationNetworkList",
                "TrainingNetworkList",
            ])
        ```
        ## Output Options
        The following options can be used to determine the information returned, and how that returned information is formatted:
        - `output_text`. Tabular report showing the updated list of bypass network IDs.
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] bypass_network_lists: . JSON array of network IDs that comprise the bypass list.
        :param pulumi.Input[int] config_id: . Unique identifier of the security configuration associated with the network bypass lists being modified.
        :param pulumi.Input[str] security_policy_id: The ID of the security policy governing the bypass network lists
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: AppSecByPassNetworkListArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        **Scopes**: Security configuration
        Specifies the networks that appear on the bypass network list. Networks on this list are allowed to bypass the Web Application Firewall.
        Note that this resource is only applicable to WAP (Web Application Protector) configurations.
        **Related API Endpoint**: [/appsec/v1/configs/{configId}/versions/{versionNumber}/bypass-network-lists](https://developer.akamai.com/api/cloud_security/application_security/v1.html#putbypassnetworklistsforawapconfigversion)
        ## Example Usage
        Basic usage:
        ```python
        import pulumi
        import pulumi_akamai as akamai
        configuration = akamai.get_app_sec_configuration(name="Documentation")
        bypass_network_lists = akamai.AppSecByPassNetworkList("bypassNetworkLists",
            config_id=configuration.config_id,
            bypass_network_lists=[
                "DocumentationNetworkList",
                "TrainingNetworkList",
            ])
        ```
        ## Output Options
        The following options can be used to determine the information returned, and how that returned information is formatted:
        - `output_text`. Tabular report showing the updated list of bypass network IDs.
        :param str resource_name: The name of the resource.
        :param AppSecByPassNetworkListArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: a populated Args object
        # takes precedence over loose keyword properties.
        resource_args, opts = _utilities.get_resource_args_opts(AppSecByPassNetworkListArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 bypass_network_lists: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 config_id: Optional[pulumi.Input[int]] = None,
                 security_policy_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        # Shared initializer behind both overloads: validates options,
        # enforces required properties, and registers the resource.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource (no existing provider id): __props__
            # must not be caller-supplied in this path.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = AppSecByPassNetworkListArgs.__new__(AppSecByPassNetworkListArgs)
            # Required unless the resource is being adopted via URN.
            if bypass_network_lists is None and not opts.urn:
                raise TypeError("Missing required property 'bypass_network_lists'")
            __props__.__dict__["bypass_network_lists"] = bypass_network_lists
            if config_id is None and not opts.urn:
                raise TypeError("Missing required property 'config_id'")
            __props__.__dict__["config_id"] = config_id
            __props__.__dict__["security_policy_id"] = security_policy_id
        super(AppSecByPassNetworkList, __self__).__init__(
            'akamai:index/appSecByPassNetworkList:AppSecByPassNetworkList',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            bypass_network_lists: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            config_id: Optional[pulumi.Input[int]] = None,
            security_policy_id: Optional[pulumi.Input[str]] = None) -> 'AppSecByPassNetworkList':
        """
        Get an existing AppSecByPassNetworkList resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] bypass_network_lists: . JSON array of network IDs that comprise the bypass list.
        :param pulumi.Input[int] config_id: . Unique identifier of the security configuration associated with the network bypass lists being modified.
        :param pulumi.Input[str] security_policy_id: The ID of the security policy governing the bypass network lists
        """
        # Merging id into opts signals the engine to look up existing state
        # instead of creating a new resource.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _AppSecByPassNetworkListState.__new__(_AppSecByPassNetworkListState)
        __props__.__dict__["bypass_network_lists"] = bypass_network_lists
        __props__.__dict__["config_id"] = config_id
        __props__.__dict__["security_policy_id"] = security_policy_id
        return AppSecByPassNetworkList(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter(name="bypassNetworkLists")
    def bypass_network_lists(self) -> pulumi.Output[Sequence[str]]:
        """
        . JSON array of network IDs that comprise the bypass list.
        """
        return pulumi.get(self, "bypass_network_lists")
    @property
    @pulumi.getter(name="configId")
    def config_id(self) -> pulumi.Output[int]:
        """
        . Unique identifier of the security configuration associated with the network bypass lists being modified.
        """
        return pulumi.get(self, "config_id")
    @property
    @pulumi.getter(name="securityPolicyId")
    def security_policy_id(self) -> pulumi.Output[Optional[str]]:
        """
        The ID of the security policy governing the bypass network lists
        """
        return pulumi.get(self, "security_policy_id")
| 46.24
| 231
| 0.680507
| 1,582
| 13,872
| 5.713654
| 0.123894
| 0.068149
| 0.089612
| 0.035955
| 0.792455
| 0.772762
| 0.762806
| 0.751521
| 0.735258
| 0.728399
| 0
| 0.000469
| 0.232122
| 13,872
| 299
| 232
| 46.394649
| 0.848104
| 0.391724
| 0
| 0.59589
| 1
| 0
| 0.120942
| 0.020288
| 0
| 0
| 0
| 0
| 0
| 1
| 0.150685
| false
| 0.273973
| 0.034247
| 0
| 0.273973
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
db2c344703174ecd5338a33cda3c145653f3b605
| 10,714
|
py
|
Python
|
backend/flourish/migrations/0001_initial.py
|
huanglianqi/ricite
|
df493314bcdc9c6cf184a334260c02bf0bbe1f0f
|
[
"MIT"
] | 3
|
2020-12-07T02:46:47.000Z
|
2021-04-25T00:54:20.000Z
|
backend/flourish/migrations/0001_initial.py
|
huanglianqi/ricite
|
df493314bcdc9c6cf184a334260c02bf0bbe1f0f
|
[
"MIT"
] | 5
|
2021-03-19T03:53:43.000Z
|
2022-01-27T16:12:58.000Z
|
backend/flourish/migrations/0001_initial.py
|
huanglianqi/ricite
|
df493314bcdc9c6cf184a334260c02bf0bbe1f0f
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.0.6 on 2020-08-20 07:21
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the `flourish` app.

    Auto-generated by Django; do not edit field definitions by hand --
    the applied database state must match this file exactly.
    """
    initial = True
    dependencies = [
    ]
    operations = [
        # Teacher-submitted feedback form header; units/pics reference it below.
        migrations.CreateModel(
            name='FeedbackForm',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('feedback_id', models.CharField(max_length=100)),
                ('create_time', models.DateTimeField(blank=True, null=True)),
                ('class_num', models.CharField(max_length=100)),
                ('_user_course_id', models.CharField(max_length=100)),
                ('user_id', models.CharField(max_length=100)),
            ],
        ),
        # A shared "moment" post; teacher FK added later via AddField because
        # Teacher is declared after Share in this operation list.
        migrations.CreateModel(
            name='Share',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('like', models.BooleanField(default=False)),
                ('user_id', models.CharField(max_length=100)),
                ('moment_id', models.CharField(max_length=100)),
                ('content', models.TextField(blank=True, null=True)),
                ('create_time', models.DateTimeField(blank=True, null=True)),
                ('user_real_name', models.CharField(blank=True, max_length=100, null=True)),
                ('user_name', models.CharField(blank=True, max_length=100, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='Teacher',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('user_id', models.CharField(max_length=100)),
                ('name', models.CharField(blank=True, max_length=100, null=True)),
                ('real_name', models.CharField(blank=True, max_length=100, null=True)),
                ('phone', models.CharField(blank=True, max_length=100, null=True)),
                ('head_img', models.URLField(blank=True, null=True)),
                ('create_time', models.DateTimeField(blank=True, null=True)),
                ('is_active', models.BooleanField()),
                ('has_course', models.BooleanField()),
                ('is_reapply', models.BooleanField()),
                ('is_fake', models.BooleanField()),
            ],
        ),
        migrations.CreateModel(
            name='UserCourse',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('user_course_id', models.CharField(max_length=100)),
                ('user_id', models.CharField(max_length=100)),
                ('tag_name', models.CharField(max_length=100)),
                ('course_num', models.IntegerField()),
                ('term_num', models.CharField(max_length=100)),
                ('is_fake', models.BooleanField(default=False)),
                ('has_protocal', models.BooleanField(default=False)),
                ('feedback_num', models.IntegerField()),
                ('finish_final', models.BooleanField()),
                ('mailBack', models.BooleanField(default=False)),
                ('teacher', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='userCourses', to='flourish.Teacher')),
            ],
        ),
        # Image attached to a Share post.
        migrations.CreateModel(
            name='SharePic',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('url', models.URLField(blank=True, null=True)),
                ('like', models.BooleanField(default=False)),
                ('moment_id', models.CharField(blank=True, max_length=100, null=True)),
                ('user_real_name', models.CharField(blank=True, max_length=100, null=True)),
                ('user_name', models.CharField(blank=True, max_length=100, null=True)),
                ('user_id', models.CharField(blank=True, max_length=100, null=True)),
                ('belongTo', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sharePics', to='flourish.Share')),
            ],
        ),
        migrations.CreateModel(
            name='ShareLike',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('user_id', models.CharField(blank=True, max_length=100, null=True)),
                ('user_name', models.CharField(blank=True, max_length=100, null=True)),
                ('user_real_name', models.CharField(blank=True, max_length=100, null=True)),
                ('moment_id', models.CharField(blank=True, max_length=100, null=True)),
                ('share', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='shareLikes', to='flourish.Share')),
                ('teacher', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='flourish.Teacher')),
            ],
        ),
        migrations.CreateModel(
            name='ShareComment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('content', models.TextField()),
                ('user_id', models.CharField(blank=True, max_length=100, null=True)),
                ('user_name', models.CharField(blank=True, max_length=100, null=True)),
                ('user_real_name', models.CharField(blank=True, max_length=100, null=True)),
                ('moment_id', models.CharField(blank=True, max_length=100, null=True)),
                ('share', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='shareComments', to='flourish.Share')),
                ('teacher', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='flourish.Teacher')),
            ],
        ),
        # Deferred FK: Share -> Teacher (Teacher was created after Share).
        migrations.AddField(
            model_name='share',
            name='teacher',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='flourish.Teacher'),
        ),
        migrations.CreateModel(
            name='InfoForm',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('field_id', models.CharField(max_length=100)),
                ('field_name', models.TextField(blank=True, null=True)),
                ('field_value', models.TextField(blank=True, null=True)),
                ('create_time', models.DateTimeField(blank=True, null=True)),
                ('user_id', models.CharField(max_length=100)),
                ('teacher', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='infoForms', to='flourish.Teacher')),
            ],
        ),
        # One name/value answer inside a FeedbackForm.
        migrations.CreateModel(
            name='FeedbackUnit',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('field_id', models.CharField(max_length=100)),
                ('field_name', models.TextField(blank=True, null=True)),
                ('field_value', models.TextField(blank=True, null=True)),
                ('feedback_id', models.CharField(max_length=100)),
                ('_user_course_id', models.CharField(max_length=100)),
                ('user_id', models.CharField(max_length=100)),
                ('create_time', models.DateTimeField(blank=True, null=True)),
                ('feedback_form', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='feedback_units', to='flourish.FeedbackForm')),
                ('teacher', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='flourish.Teacher')),
                ('user_course', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='flourish.UserCourse')),
            ],
        ),
        migrations.CreateModel(
            name='FeedbackPic',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('pic_url', models.URLField(blank=True, null=True)),
                ('feedback_id', models.CharField(max_length=100)),
                ('_user_course_id', models.CharField(max_length=100)),
                ('user_id', models.CharField(max_length=100)),
                ('like', models.BooleanField(default=False)),
                ('create_time', models.DateTimeField(blank=True, null=True)),
                ('feedback_form', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='feedback_pics', to='flourish.FeedbackForm')),
                ('teacher', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='flourish.Teacher')),
                ('user_course', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='feedback_pics', to='flourish.UserCourse')),
            ],
        ),
        # Deferred FKs: FeedbackForm -> Teacher / UserCourse.
        migrations.AddField(
            model_name='feedbackform',
            name='teacher',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='flourish.Teacher'),
        ),
        migrations.AddField(
            model_name='feedbackform',
            name='user_course',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='feedback_forms', to='flourish.UserCourse'),
        ),
        migrations.CreateModel(
            name='ApplyCourse',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('stu_num', models.IntegerField(blank=True, null=True)),
                ('teacher_num', models.IntegerField(blank=True, null=True)),
                ('front_img', models.URLField(blank=True, null=True)),
                ('back_img', models.URLField(blank=True, null=True)),
                ('school_name', models.CharField(blank=True, max_length=100, null=True)),
                ('school_address', models.CharField(blank=True, max_length=200, null=True)),
                ('_user_course_id', models.CharField(max_length=100)),
                ('user_id', models.CharField(max_length=100)),
                ('teacher', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='applyCourses', to='flourish.Teacher')),
                ('user_course', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='applycourse', to='flourish.UserCourse')),
            ],
        ),
    ]
| 57.913514
| 157
| 0.594176
| 1,113
| 10,714
| 5.546271
| 0.101527
| 0.099627
| 0.077758
| 0.085534
| 0.842216
| 0.821319
| 0.765106
| 0.722177
| 0.722177
| 0.707274
| 0
| 0.017332
| 0.25686
| 10,714
| 184
| 158
| 58.228261
| 0.757975
| 0.0042
| 0
| 0.649718
| 1
| 0
| 0.135183
| 0.003937
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.011299
| 0
| 0.033898
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e1d42fe05ca4a3d33e7a8b8ed0fd28d54acb7d1b
| 8,875
|
py
|
Python
|
drivers/eza2500/command0601.py
|
jinupygogo/apis-dcdc_batt_comm
|
7fc4317df414d1b4a4efea271605a52d6aae950b
|
[
"Apache-2.0"
] | 3
|
2020-12-01T04:30:12.000Z
|
2021-12-28T02:42:44.000Z
|
drivers/eza2500/command0601.py
|
jinupygogo/apis-dcdc_batt_comm
|
7fc4317df414d1b4a4efea271605a52d6aae950b
|
[
"Apache-2.0"
] | null | null | null |
drivers/eza2500/command0601.py
|
jinupygogo/apis-dcdc_batt_comm
|
7fc4317df414d1b4a4efea271605a52d6aae950b
|
[
"Apache-2.0"
] | 2
|
2020-12-01T14:07:48.000Z
|
2021-02-19T07:10:23.000Z
|
# -*- coding: utf-8 -*-
from struct import pack, unpack
import os
from essx import essx_debug
from essx.essx_exception import ESSXDeviceException, ESSXValueException, ESSXParameterException, ESSXException
from eza2500 import eza2500_base
from eza2500 import eza2500_util
class Command0601(eza2500_base.EZA2500CommandBase):
    """EZA2500 command 6-1: read the converter limit registers.

    Sends a parameterless request (command code 28 / 0x1c) and decodes the
    ACK payload into the cib/dig/ubv/ugv/obv/ogv values in ``self.response``.
    """
    COMMAND = 28
    CMD_LEN = 0   # request carries no payload
    ACK_LEN = 12  # ACK payload: 6 x uint16 + checksum
    NAK_LEN = 2

    def __init__(self, device):
        super(Command0601, self).__init__(device)
        self.response = {}

    def pack_senddata(self, ad1, ad2, params=None):
        """Build the request frame.

        ``params`` is unused for 6-1 but kept for interface parity with the
        other command classes.  (Default changed from a shared mutable ``{}``
        to ``None`` -- behavior is identical since it is never read.)
        """
        # 0x05 = start byte, then length, addresses, command code; the
        # trailing b"00" is a placeholder overwritten with the checksum.
        req = pack("<BBBBB", 0x05, self.CMD_LEN, ad1, ad2, 28) + b"00"
        return eza2500_util.replace_check_sum(req)

    def send(self, ad1, ad2, params=None):
        """Encode the request, write it to the device, return the bytes sent."""
        send_data = self.pack_senddata(ad1, ad2, params)
        essx_debug.log('send')
        essx_debug.dump(send_data)
        self.device.write(send_data)
        return send_data

    def recv(self):
        """Read and decode one response frame.

        Returns the raw frame; decoded fields are stored in ``self.response``.
        :raises ESSXDeviceException: on a NAK response (ERCD in the message).
        :raises ESSXValueException: on an unrecognized command byte.
        """
        essx_debug.log('recv')
        recv_data = self._recv()
        self.response_raw = recv_data
        res = {}
        (_sfd, _len, _ad1, _ad2, _cmd) = unpack("BBBBB", recv_data[0:5])
        if _cmd == 0x1c:  # ACK
            (_cib, _dig, _ubv, _ugv, _obv, _ogv, _chksum) = unpack("<HHHHHHH", recv_data[5:])
            # Convert raw fixed-point register values back to engineering
            # units; arguments are (raw, q-bits, nominal, min, max, name).
            res["cib"] = eza2500_util.q_denormalize(_cib, 13, '52.08', '0', '56.77', 'cib')
            res["dig"] = eza2500_util.q_denormalize(_dig, 13, '7.8125', '0', '8.5162', 'dig')
            res["ubv"] = eza2500_util.q_denormalize(_ubv, 14, '48', '32', '68', 'ubv')
            res["ugv"] = eza2500_util.q_denormalize(_ugv, 14, '380', '260', '425', 'ugv')
            res["obv"] = eza2500_util.q_denormalize(_obv, 14, '48', '32', '68', 'obv')
            res["ogv"] = eza2500_util.q_denormalize(_ogv, 14, '380', '260', '425', 'ogv')
            res["chksum"] = _chksum
            self.response = res
        elif _cmd == 0x9c:  # NAK: error code + checksum
            (_ercd, _chksum) = unpack("<HH", recv_data[5:])
            res["ercd"] = _ercd
            res["chksum"] = _chksum
            self.response = res
            raise ESSXDeviceException("error: ERCD=%x" % _ercd)
        else:
            raise ESSXValueException("bad response")
        essx_debug.log('recv')
        return recv_data

    @classmethod
    def unit_test(cls, dev=None, params=None):
        """Round-trip the command against an in-memory dummy device."""
        from io import BytesIO
        class Dummy:
            # Fakes the serial device: prebuilds one valid ACK frame and
            # serves it back through read().
            def __init__(self):
                _cib = int(eza2500_util.q_normalize(28.385, 13, '52.08', '0', '56.77', 'cib'))
                _dig = int(eza2500_util.q_normalize(4.2581, 13, '7.8125', '0', '8.5162', 'dig'))
                _ubv = int(eza2500_util.q_normalize(50.0, 14, '48', '32', '68', 'ubv'))
                _ugv = int(eza2500_util.q_normalize(342.5, 14, '380', '260', '425', 'ugv'))
                _obv = int(eza2500_util.q_normalize(50.0, 14, '48', '32', '68', 'obv'))
                _ogv = int(eza2500_util.q_normalize(342.5, 14, '380', '260', '425', 'ogv'))
                data = pack("<BBBBBHHHHHHH", 2, Command0601.ACK_LEN, 1, 2, 0x1c,
                            _cib, _dig, _ubv, _ugv, _obv, _ogv, 0)
                _chksum = eza2500_util.calc_check_sum(data)
                # Patch the little-endian checksum into the last two bytes.
                self.reader = BytesIO(data[:-2] + pack('BB', _chksum % 256, _chksum // 256))
            def read(self, bytes):
                return self.reader.read(bytes)
            def write(self, data):
                essx_debug.dump(data)
        if dev is None:  # was `== None`: identity check is the correct idiom
            dev = Dummy()
        cmd = Command0601(dev)
        if params is None:
            params = {}
        cmd.send(1, 2, params)
        cmd.recv()
class Command0604(eza2500_base.EZA2500CommandBase):
    """EZA2500 command 6-4: write the converter limit registers.

    Sends the six cib/dig/ubv/ugv/obv/ogv values (command code 28 / 0x1c)
    and decodes the echoed ACK payload into ``self.response``.
    """
    COMMAND = 28
    CMD_LEN = 12  # request payload: 6 x uint16
    ACK_LEN = 12
    NAK_LEN = 2

    # (key, q-bits, nominal, min, max) per wire field, in wire order.
    _FIELDS = (
        ('cib', 13, '52.08', '0', '56.77'),
        ('dig', 13, '7.8125', '0', '8.5162'),
        ('ubv', 14, '48', '32', '68'),
        ('ugv', 14, '380', '260', '425'),
        ('obv', 14, '48', '32', '68'),
        ('ogv', 14, '380', '260', '425'),
    )

    def __init__(self, device):
        super(Command0604, self).__init__(device)
        self.response = {}

    def pack_senddata(self, ad1, ad2, params=None):
        """Build the request frame from ``params``.

        :raises ESSXParameterException: when a required key is missing;
            keys are checked in wire order (cib, dig, ubv, ugv, obv, ogv),
            matching the original per-key if/else chain.
        """
        if params is None:  # default changed from mutable {}: behavior identical
            params = {}
        raw = []
        for key, qbits, nominal, vmin, vmax in self._FIELDS:
            if key not in params:
                raise ESSXParameterException('no parameter: ' + key)
            raw.append(int(eza2500_util.q_normalize(params[key], qbits, nominal, vmin, vmax, key)))
        # 0x05 = start byte; trailing b"00" is replaced by the checksum.
        req = pack("<BBBBBHHHHHH", 0x05, self.CMD_LEN, ad1, ad2, 28, *raw) + b"00"
        return eza2500_util.replace_check_sum(req)

    def send(self, ad1, ad2, params=None):
        """Encode the request, write it to the device, return the bytes sent."""
        send_data = self.pack_senddata(ad1, ad2, params)
        essx_debug.log('send')
        essx_debug.dump(send_data)
        self.device.write(send_data)
        return send_data

    def recv(self):
        """Read and decode one response frame.

        Returns the raw frame; decoded fields are stored in ``self.response``.
        :raises ESSXDeviceException: on a NAK response (ERCD in the message).
        :raises ESSXValueException: on an unrecognized command byte.
        """
        essx_debug.log('recv')
        recv_data = self._recv()
        self.response_raw = recv_data
        res = {}
        (_sfd, _len, _ad1, _ad2, _cmd) = unpack("BBBBB", recv_data[0:5])
        if _cmd == 0x1c:  # ACK: device echoes the six values back
            values = unpack("<HHHHHHH", recv_data[5:])
            for (key, qbits, nominal, vmin, vmax), word in zip(self._FIELDS, values):
                res[key] = eza2500_util.q_denormalize(word, qbits, nominal, vmin, vmax, key)
            res["chksum"] = values[-1]
            self.response = res
        elif _cmd == 0x9c:  # NAK: error code + checksum
            (_ercd, _chksum) = unpack("<HH", recv_data[5:])
            res["ercd"] = _ercd
            res["chksum"] = _chksum
            self.response = res
            raise ESSXDeviceException("error: ERCD=%x" % _ercd)
        else:
            raise ESSXValueException("bad response")
        essx_debug.log('recv')
        return recv_data

    @classmethod
    def unit_test(cls, dev=None, params=None):
        """Round-trip the command against an in-memory dummy device."""
        from io import BytesIO
        class Dummy:
            # Fakes the serial device: prebuilds one valid ACK frame and
            # serves it back through read().
            def __init__(self):
                _cib = int(eza2500_util.q_normalize(28.385, 13, '52.08', '0', '56.77', 'cib'))
                _dig = int(eza2500_util.q_normalize(4.2581, 13, '7.8125', '0', '8.5162', 'dig'))
                _ubv = int(eza2500_util.q_normalize(50.0, 14, '48', '32', '68', 'ubv'))
                _ugv = int(eza2500_util.q_normalize(342.5, 14, '380', '260', '425', 'ugv'))
                _obv = int(eza2500_util.q_normalize(50.0, 14, '48', '32', '68', 'obv'))
                _ogv = int(eza2500_util.q_normalize(342.5, 14, '380', '260', '425', 'ogv'))
                data = pack("<BBBBBHHHHHHH", 2, Command0604.ACK_LEN, 1, 2, 0x1c,
                            _cib, _dig, _ubv, _ugv, _obv, _ogv, 0)
                _chksum = eza2500_util.calc_check_sum(data)
                # Patch the little-endian checksum into the last two bytes.
                self.reader = BytesIO(data[:-2] + pack('BB', _chksum % 256, _chksum // 256))
            def read(self, bytes):
                return self.reader.read(bytes)
            def write(self, data):
                essx_debug.dump(data)
        if dev is None:  # was `== None`: identity check is the correct idiom
            dev = Dummy()
        cmd = Command0604(dev)
        if params is None:
            params = {}
        # Populate all six required parameters (overwrites any passed-in
        # values, exactly as the original did).
        params['cib'] = 28.385
        params['dig'] = 4.2581
        params['ubv'] = 50.0
        params['ugv'] = 342.5
        params['obv'] = 50.0
        params['ogv'] = 342.5
        cmd.send(1, 2, params)
        cmd.recv()
# To run the unit tests standalone, add the parent directory to PYTHONPATH.
if __name__ == "__main__":
    import sys
    #import serial
    import essx
    from eza2500_device import EZA2500Device
    # argv[1] == '1' selects a real RS-232C device at /dev/cuaU1; any other
    # invocation passes dev=None so unit_test falls back to its Dummy device.
    if len(sys.argv) > 1 and sys.argv[1] == '1':
        ser_dev = essx.essx_rs232c.ESSXRS232C('/dev/cuaU1', 115200)
        dev = EZA2500Device(dev = ser_dev, timeout = 1)
    else:
        dev = None
    try:
        Command0601.unit_test(dev)
    except ESSXException as err:
        print(err.reason)
        raise err
    try:
        Command0604.unit_test(dev)
    except ESSXException as err:
        print(err.reason)
        raise err
| 33.490566
| 117
| 0.600901
| 1,214
| 8,875
| 4.11285
| 0.120264
| 0.077108
| 0.072101
| 0.054076
| 0.815141
| 0.741438
| 0.737833
| 0.720208
| 0.720208
| 0.720208
| 0
| 0.107249
| 0.23831
| 8,875
| 264
| 118
| 33.617424
| 0.631361
| 0.018366
| 0
| 0.774892
| 0
| 0
| 0.087792
| 0
| 0
| 0
| 0.003682
| 0
| 0
| 1
| 0.069264
| false
| 0
| 0.047619
| 0.008658
| 0.203463
| 0.008658
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e1dfd65f88b6e83d1997ce80ba5a78895d3d3a27
| 2,323
|
py
|
Python
|
tests/integration/operators_test/gt_lt_test.py
|
gglin001/popart
|
3225214343f6d98550b6620e809a3544e8bcbfc6
|
[
"MIT"
] | 61
|
2020-07-06T17:11:46.000Z
|
2022-03-12T14:42:51.000Z
|
tests/integration/operators_test/gt_lt_test.py
|
gglin001/popart
|
3225214343f6d98550b6620e809a3544e8bcbfc6
|
[
"MIT"
] | 1
|
2021-02-25T01:30:29.000Z
|
2021-11-09T11:13:14.000Z
|
tests/integration/operators_test/gt_lt_test.py
|
gglin001/popart
|
3225214343f6d98550b6620e809a3544e8bcbfc6
|
[
"MIT"
] | 6
|
2020-07-15T12:33:13.000Z
|
2021-11-07T06:55:00.000Z
|
# Copyright (c) 2019 Graphcore Ltd. All rights reserved.
import numpy as np
import popart
import torch
import pytest
from op_tester import op_tester
def test_greater(op_tester):
    """Elementwise greater-than on two rank-1 tensors, checked against torch.gt."""
    lhs = np.random.rand(2).astype(np.float32)
    rhs = np.random.rand(2).astype(np.float32)

    def init_builder(builder):
        a, b = builder.addInputTensor(lhs), builder.addInputTensor(rhs)
        result = builder.aiOnnx.greater([a, b])
        builder.addOutputTensor(result)
        return [result]

    def reference(ref_data):
        return [torch.gt(torch.tensor(lhs), torch.tensor(rhs))]

    op_tester.run(init_builder, reference, step_type='infer')
def test_broadcast_greater(op_tester):
    """Greater-than with broadcasting: (2, 2) tensor against a (2,) tensor."""
    lhs = np.random.rand(2, 2).astype(np.float32)
    rhs = np.random.rand(2).astype(np.float32)

    def init_builder(builder):
        a, b = builder.addInputTensor(lhs), builder.addInputTensor(rhs)
        result = builder.aiOnnx.greater([a, b])
        builder.addOutputTensor(result)
        return [result]

    def reference(ref_data):
        return [torch.gt(torch.tensor(lhs), torch.tensor(rhs))]

    op_tester.run(init_builder, reference, step_type='infer')
def test_less(op_tester):
    """Elementwise less-than on two rank-1 tensors, checked against torch.lt."""
    lhs = np.random.rand(2).astype(np.float32)
    rhs = np.random.rand(2).astype(np.float32)

    def init_builder(builder):
        a, b = builder.addInputTensor(lhs), builder.addInputTensor(rhs)
        result = builder.aiOnnx.less([a, b])
        builder.addOutputTensor(result)
        return [result]

    def reference(ref_data):
        return [torch.lt(torch.tensor(lhs), torch.tensor(rhs))]

    op_tester.run(init_builder, reference, step_type='infer')
def test_broadcast_less(op_tester):
    """Less-than with broadcasting: (2, 2) tensor against a (2,) tensor."""
    lhs = np.random.rand(2, 2).astype(np.float32)
    rhs = np.random.rand(2).astype(np.float32)

    def init_builder(builder):
        a, b = builder.addInputTensor(lhs), builder.addInputTensor(rhs)
        result = builder.aiOnnx.less([a, b])
        builder.addOutputTensor(result)
        return [result]

    def reference(ref_data):
        return [torch.lt(torch.tensor(lhs), torch.tensor(rhs))]

    op_tester.run(init_builder, reference, step_type='infer')
| 25.527473
| 61
| 0.626345
| 316
| 2,323
| 4.503165
| 0.164557
| 0.056219
| 0.067463
| 0.073085
| 0.911455
| 0.911455
| 0.911455
| 0.911455
| 0.895994
| 0.895994
| 0
| 0.049255
| 0.248386
| 2,323
| 90
| 62
| 25.811111
| 0.76575
| 0.023246
| 0
| 0.861538
| 0
| 0
| 0.008822
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.184615
| false
| 0
| 0.076923
| 0
| 0.384615
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c0459e14f4bb29676ef72a097f9fcd1fb9920615
| 8,727
|
py
|
Python
|
backend/api/models.py
|
MimbleWimble-Grin/grin-testnet-deposit-withdraw
|
7943f654b0a6d79d9a31c9719366e9df55d6816c
|
[
"MIT"
] | 6
|
2021-03-11T21:02:21.000Z
|
2022-02-06T20:53:20.000Z
|
backend/api/models.py
|
pkariz/grin-testnet-exchange
|
b5c7a5b6322f60348e3b3db563183e2d6d2da234
|
[
"MIT"
] | 1
|
2021-03-12T12:10:19.000Z
|
2021-03-12T12:20:32.000Z
|
backend/api/models.py
|
MimbleWimble-Grin/grin-testnet-deposit-withdraw
|
7943f654b0a6d79d9a31c9719366e9df55d6816c
|
[
"MIT"
] | 3
|
2021-03-12T16:42:03.000Z
|
2021-04-19T07:11:33.000Z
|
from django.conf import settings
from django.contrib.auth.models import User
from django.core.validators import MinValueValidator, MaxValueValidator
from django.db import models, transaction
from guardian.shortcuts import assign_perm
from model_utils.models import TimeStampedModel
class Currency(TimeStampedModel):
    """A currency supported by the service, identified by name and symbol."""

    # Human-readable display name; unique across currencies.
    name = models.CharField(unique=True, max_length=255)
    # Short ticker; SlugField keeps it URL-safe, also unique.
    symbol = models.SlugField(unique=True, max_length=255)

    class Meta:
        # Default pluralization would render as "currencys" in the admin.
        verbose_name_plural = "currencies"

    def __str__(self):
        return self.name
class Balance(TimeStampedModel):
    """Per-user holdings of a single currency.

    ``amount`` is the spendable balance; ``locked_amount`` tracks funds
    reserved by in-flight deposits/withdrawals (both are adjusted by
    Deposit.save()/Withdrawal.save() and the matching delete() methods).
    """

    currency = models.ForeignKey(
        Currency,
        related_name='balances',
        on_delete=models.CASCADE,
    )
    user = models.ForeignKey(
        User,
        related_name='balances',
        on_delete=models.CASCADE,
    )
    # Spendable funds; validators reject negatives and values above 2**64.
    amount = models.DecimalField(
        max_digits=30,
        decimal_places=9,
        default=0.0,
        validators=[MinValueValidator(0), MaxValueValidator(2**64)]
    )
    # Funds reserved while a deposit/withdrawal is pending confirmation.
    locked_amount = models.DecimalField(
        max_digits=30,
        decimal_places=9,
        default=0.0,
        validators=[MinValueValidator(0), MaxValueValidator(2**64)]
    )

    def __str__(self):
        return '{}, total: {}, locked: {}, user: {}'.format(
            self.currency.symbol,
            self.amount,
            self.locked_amount,
            self.user.username
        )

    @transaction.atomic
    def save(self, *args, **kwargs):
        """Needed to manually run validators on amounts."""
        # full_clean runs validators; Django does NOT run them on plain save().
        self.full_clean()
        return super().save(*args, **kwargs)
class Deposit(TimeStampedModel):
    """A deposit of a currency amount into a user's Balance.

    Status lifecycle (driven by save()):
      'awaiting transaction signature' -> 'awaiting confirmation'
          the deposited amount is added to Balance.locked_amount
      'awaiting confirmation' -> 'finished'
          once ``confirmations`` reaches settings.REQUIRED_CONFIRMATIONS the
          locked amount is released into the spendable Balance.amount
    """

    STATUSES = (
        ('awaiting transaction signature', 'awaiting transaction signature'),
        ('awaiting confirmation', 'awaiting confirmation'),
        ('finished', 'finished'),
        ('canceled', 'canceled'),
    )
    balance = models.ForeignKey(
        Balance,
        related_name='deposits',
        on_delete=models.CASCADE,
    )
    amount = models.DecimalField(
        max_digits=30,
        decimal_places=9,
        default=0.0,
        validators=[MinValueValidator(0), MaxValueValidator(2**64)]
    )
    status = models.CharField(max_length=255, choices=STATUSES)
    # Number of confirmations observed so far for this deposit's transaction.
    confirmations = models.IntegerField(
        validators=[MinValueValidator(0)],
        default=0
    )
    # tx_slate_id is needed in case when we want to cancel the deposit after the
    # first step of RSR (eg. when new deposit is initiated)
    tx_slate_id = models.CharField(unique=True, max_length=255)
    # we store kernel excess to update number of confirmations
    kernel_excess = models.CharField(
        unique=True, null=True, blank=True, max_length=255)

    class Meta:
        ordering = ['created']

    def __str__(self):
        return '{}, amount: {}, status: {}, user:{}'.format(
            self.balance.currency.symbol,
            self.amount,
            self.status,
            self.balance.user.username
        )

    @transaction.atomic
    def save(self, *args, **kwargs):
        """On deposit create set permissions"""
        # No pk yet means this row is being inserted for the first time.
        created = self.pk is None
        if not created:
            # Re-read the persisted row to detect status transitions.
            current_deposit = Deposit.objects.get(pk=self.pk)
            if (
                current_deposit.status == 'awaiting transaction signature' and
                self.status == 'awaiting confirmation'
            ):
                # finished the transaction, lock amount in balance
                balance = self.balance
                balance.locked_amount = balance.locked_amount + self.amount
                balance.save()
            elif (
                self.status == 'awaiting confirmation' and
                self.confirmations == settings.REQUIRED_CONFIRMATIONS
            ):
                # NOTE(review): equality (==) assumes confirmations is bumped
                # one step at a time and never jumps past the threshold;
                # confirm the updater, otherwise '>=' would be safer.
                self.status = 'finished'
                # deposit completed, transfer locked amount to available amount
                balance = self.balance
                balance.locked_amount = balance.locked_amount - self.amount
                balance.amount = balance.amount + self.amount
                balance.save()
        # full_clean runs validators
        self.full_clean()
        res = super().save(*args, **kwargs)
        if created:
            # Grant the owning user object-level read access (django-guardian).
            assign_perm('api.view_deposit', self.balance.user, self)
        return res

    @transaction.atomic
    def delete(self, **kwargs):
        # we need to remove locked amount from balance if anything is locked
        # NOTE: this can also be called on an already confirmed deposit in
        # which case nothing is locked
        if self.status == 'awaiting confirmation':
            # the deposit's amount is locked in its balance; release it
            balance = self.balance
            balance.locked_amount = balance.locked_amount - self.amount
            balance.save()
        # NOTE: we should cancel tx here, but it's more explicit to do it
        # in the view. The downside is that when we delete it through a
        # shell we need to manually cancel the transaction
        return super().delete(**kwargs)
class Withdrawal(TimeStampedModel):
    """A withdrawal of a currency amount from a user's Balance.

    Status lifecycle (driven by save()):
      'awaiting transaction signature' -> 'awaiting confirmation'
          the amount moves from the spendable Balance.amount into
          Balance.locked_amount
      'awaiting confirmation' -> 'finished'
          once ``confirmations`` reaches settings.REQUIRED_CONFIRMATIONS the
          locked amount is removed entirely (funds have left the account)
    """

    STATUSES = (
        ('awaiting transaction signature', 'awaiting transaction signature'),
        ('awaiting confirmation', 'awaiting confirmation'),
        ('finished', 'finished'),
        ('canceled', 'canceled'),
    )
    balance = models.ForeignKey(
        Balance,
        related_name='withdrawals',
        on_delete=models.CASCADE,
    )
    amount = models.DecimalField(
        max_digits=30,
        decimal_places=9,
        default=0.0,
        validators=[MinValueValidator(0), MaxValueValidator(2**64)]
    )
    status = models.CharField(max_length=255, choices=STATUSES)
    # Number of confirmations observed so far for this withdrawal's transaction.
    confirmations = models.IntegerField(
        validators=[MinValueValidator(0)],
        default=0
    )
    # tx_slate_id is needed in case when we want to cancel the withdrawal after
    # the first step of SRS (eg. when new withdrawal is initiated)
    tx_slate_id = models.CharField(unique=True, max_length=255)
    # we store kernel excess to update number of confirmations
    kernel_excess = models.CharField(
        unique=True, null=True, blank=True, max_length=255)

    class Meta:
        ordering = ['created']

    def __str__(self):
        return '{}, amount: {}, status: {}, user:{}'.format(
            self.balance.currency.symbol,
            self.amount,
            self.status,
            self.balance.user.username
        )

    @transaction.atomic
    def save(self, *args, **kwargs):
        """On withdrawal create set permissions"""
        # No pk yet means this row is being inserted for the first time.
        created = self.pk is None
        if not created:
            # Re-read the persisted row to detect status transitions.
            current_withdrawal = Withdrawal.objects.get(pk=self.pk)
            if (
                current_withdrawal.status == 'awaiting transaction signature' and
                self.status == 'awaiting confirmation'
            ):
                # finished the transaction, lock amount in balance
                balance = self.balance
                balance.locked_amount = balance.locked_amount + self.amount
                balance.amount = balance.amount - self.amount
                balance.save()
            elif (
                self.status == 'awaiting confirmation' and
                self.confirmations == settings.REQUIRED_CONFIRMATIONS
            ):
                # NOTE(review): equality (==) assumes confirmations never jumps
                # past the threshold in a single update — confirm the updater,
                # otherwise '>=' would be safer (same pattern as Deposit.save).
                self.status = 'finished'
                # withdrawal completed, remove locked amount
                balance = self.balance
                balance.locked_amount = balance.locked_amount - self.amount
                balance.save()
        # full_clean runs validators
        self.full_clean()
        res = super().save(*args, **kwargs)
        if created:
            # Grant the owning user object-level read access (django-guardian).
            assign_perm('api.view_withdrawal', self.balance.user, self)
        return res

    @transaction.atomic
    def delete(self, **kwargs):
        # we need to remove locked amount from balance if anything is locked
        # NOTE: this can also be called on an already confirmed withdrawal in
        # which case nothing is locked
        if self.status == 'awaiting confirmation':
            # the withdrawal's amount is locked in its balance, return it to
            # the available balance
            balance = self.balance
            balance.locked_amount = balance.locked_amount - self.amount
            balance.amount = balance.amount + self.amount
            balance.save()
        # NOTE: we should cancel tx here, but it's more explicit to do it
        # in the view. The downside is that when we delete it through a
        # shell we need to manually cancel the transaction
        return super().delete(**kwargs)
| 35.91358
| 81
| 0.611436
| 939
| 8,727
| 5.593184
| 0.182109
| 0.049505
| 0.043412
| 0.039414
| 0.82083
| 0.807121
| 0.802361
| 0.757997
| 0.757997
| 0.748477
| 0
| 0.010459
| 0.298843
| 8,727
| 242
| 82
| 36.061983
| 0.847851
| 0.189068
| 0
| 0.73913
| 0
| 0
| 0.095123
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.048913
| false
| 0
| 0.032609
| 0.021739
| 0.277174
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c066cce923151c18c1f271885e4c9113464ebe9f
| 15,004
|
py
|
Python
|
data_generation/common_nlu/object_stories.py
|
ivanmkc/helpdesk-assistant
|
0697d7e2d0dca77dc4c8043ec61e62350dbd4a8a
|
[
"Apache-2.0"
] | null | null | null |
data_generation/common_nlu/object_stories.py
|
ivanmkc/helpdesk-assistant
|
0697d7e2d0dca77dc4c8043ec61e62350dbd4a8a
|
[
"Apache-2.0"
] | null | null | null |
data_generation/common_nlu/object_stories.py
|
ivanmkc/helpdesk-assistant
|
0697d7e2d0dca77dc4c8043ec61e62350dbd4a8a
|
[
"Apache-2.0"
] | null | null | null |
from rasa.shared.nlu.state_machine.state_machine_models import (
ActionName,
Intent,
)
from data_generation.common_nlu import common_intent_creators
from data_generation.models.story_models import (
SlotWasSet,
Story,
Or,
)
import actions.find_objects_action as find_objects_action
import actions.say_object_intros as say_object_intros
from actions import (
find_objects_action,
get_object_info,
question_answer_action,
)
# Response used when a lookup finds no matching objects; currently routed
# through the generic question/answer action.
utter_no_objects_found = ActionName(
    question_answer_action.ACTION_NAME
)  # Utterance("None objects found")

# Stories for returning attribute given existing context
stories = []

# Intent creators that trigger an object search (by type / by contained thing).
find_object_creators = [
    common_intent_creators.intent_is_there_a_type_creator,
    common_intent_creators.intent_is_there_a_place_with_thing_creator,
]
for intent_creator in find_object_creators:
stories += [
# Not found case
Story(
elements=[
Intent(
name=intent_creator.name,
entities=[intent_creator.entity_name],
),
SlotWasSet(
[
intent_creator.entity_name,
]
),
ActionName(find_objects_action.ACTION_NAME),
utter_no_objects_found,
# ActionName(
# action_reset_slots_except_found_object_names.ACTION_NAME
# ),
]
),
# Found case
Story(
elements=[
Intent(
name=intent_creator.name,
entities=[intent_creator.entity_name],
),
SlotWasSet(
[
intent_creator.entity_name,
]
),
ActionName(find_objects_action.ACTION_NAME),
SlotWasSet(
[
find_objects_action.SLOT_FOUND_OBJECT_NAMES,
]
),
ActionName(say_object_intros.ACTION_NAME), # ActionName(
# action_reset_slots_except_found_object_names.ACTION_NAME
# ),
]
),
]
# Handle what about scenarios
# intent_creator = common_intents.intent_what_about_context_creator
# stories.append(
# Story(
# elements=[
# Intent(
# name=intent_creator.name,
# entities=[intent_creator.entity_name],
# ),
# SlotWasSet(
# [
# intent_creator.entity_name,
# ]
# ),
# ActionName(get_object_info.ACTION_NAME),
# ActionName(
# action_reset_slots_except_found_object_names.ACTION_NAME
# ),
# ]
# )
# )
# Get object info stories
for intent_creator in common_intent_creators.intent_creators:
if intent_creator.object_attribute:
slot_set_action_name = f"action_set_{get_object_info.SLOT_OBJECT_ATTRIBUTE}_{intent_creator.object_attribute}"
# Create story
stories += [
# Entities found, objects found
Story(
elements=[
Intent(
name=intent_creator.name,
entities=[intent_creator.entity_name],
),
SlotWasSet(
[
intent_creator.entity_name,
]
),
# Set attribute slot
# Action should be the one created dynamically above
ActionName(slot_set_action_name),
SlotWasSet(
[
# intent_creator.entity_name,
get_object_info.SLOT_OBJECT_ATTRIBUTE,
]
),
# Find the objects
ActionName(find_objects_action.ACTION_NAME),
SlotWasSet(
[
find_objects_action.SLOT_FOUND_OBJECT_NAMES,
]
),
ActionName(get_object_info.ACTION_NAME),
# Reset all irrelevant slots
# ActionName(
# action_reset_slots_except_found_object_names.ACTION_NAME
# ),
]
),
# Entities, no objects found
Story(
elements=[
Intent(
name=intent_creator.name,
entities=[intent_creator.entity_name],
),
SlotWasSet(
[
intent_creator.entity_name,
]
),
# Set attribute slot
# Action should be the one created dynamically above
ActionName(slot_set_action_name),
SlotWasSet(
[
# intent_creator.entity_name,
get_object_info.SLOT_OBJECT_ATTRIBUTE,
]
),
# Find the objects
ActionName(find_objects_action.ACTION_NAME),
ActionName(get_object_info.ACTION_NAME),
# Reset all irrelevant slots
# ActionName(
# action_reset_slots_except_found_object_names.ACTION_NAME
# ),
]
),
# No entities found
Story(
elements=[
Intent(
name=intent_creator.name,
),
# SlotWasSet([intent_creator.entity_name,]),
# Set attribute slot
# Action should be the one created dynamically above
ActionName(slot_set_action_name),
SlotWasSet(
[
# intent_creator.entity_name,
get_object_info.SLOT_OBJECT_ATTRIBUTE,
]
),
# Find the objects
# ActionName(find_objects_action.ACTION_NAME),
ActionName(get_object_info.ACTION_NAME),
# Reset all irrelevant slots
# ActionName(
# action_reset_slots_except_found_object_names.ACTION_NAME
# ),
]
),
]
# I want to buy
intent_creator = common_intent_creators.intent_i_want_to_buy_creator
stories += [
# Entities with number, objects found
Story(
elements=[
Or(
Intent(
name=intent_creator.name,
entities=[intent_creator.entity_name, "number"],
),
Intent(
name=common_intent_creators.intent_context_only_creator.name,
entities=[intent_creator.entity_name, "number"],
),
),
SlotWasSet([intent_creator.entity_name, "number"]),
# Find the objects
ActionName(find_objects_action.ACTION_NAME),
# Found
SlotWasSet(
[
find_objects_action.SLOT_FOUND_OBJECT_NAMES,
]
),
ActionName("action_buy_object"),
]
),
# Entities with number, no objects found
Story(
elements=[
Or(
Intent(
name=intent_creator.name,
entities=[intent_creator.entity_name, "number"],
),
Intent(
name=common_intent_creators.intent_context_only_creator.name,
entities=[intent_creator.entity_name, "number"],
),
),
SlotWasSet([intent_creator.entity_name, "number"]),
# Find the objects
ActionName(find_objects_action.ACTION_NAME),
# TODO: Call a common buy action, it checks 'purchasability' and redirects to the appropriate trigger action
ActionName("action_buy_object"),
]
),
# Entities with number only, objects found
Story(
elements=[
Or(
Intent(
name=intent_creator.name,
entities=["number"],
),
Intent(
name=common_intent_creators.intent_context_only_creator.name,
entities=["number"],
),
),
SlotWasSet([intent_creator.entity_name, "number"]),
# Find the objects
ActionName(find_objects_action.ACTION_NAME),
# Found
SlotWasSet(
[
find_objects_action.SLOT_FOUND_OBJECT_NAMES,
]
),
ActionName("action_buy_object"),
]
),
# Entities with number only, no objects found
Story(
elements=[
Or(
Intent(
name=intent_creator.name,
entities=["number"],
),
Intent(
name=common_intent_creators.intent_context_only_creator.name,
entities=["number"],
),
),
SlotWasSet([intent_creator.entity_name, "number"]),
# Find the objects
ActionName(find_objects_action.ACTION_NAME),
# TODO: Call a common buy action, it checks 'purchasability' and redirects to the appropriate trigger action
ActionName("action_buy_object"),
]
),
# Entities, objects found
Story(
elements=[
Or(
Intent(
name=intent_creator.name,
entities=[intent_creator.entity_name],
),
Intent(
name=common_intent_creators.intent_context_only_creator.name,
entities=[intent_creator.entity_name],
),
),
SlotWasSet(
[
intent_creator.entity_name,
]
),
# Find the objects
ActionName(find_objects_action.ACTION_NAME),
# Found
SlotWasSet(
[
find_objects_action.SLOT_FOUND_OBJECT_NAMES,
]
),
ActionName("action_buy_object"),
]
),
# Entities, no objects found
Story(
elements=[
Or(
Intent(
name=intent_creator.name,
entities=[intent_creator.entity_name],
),
Intent(
name=common_intent_creators.intent_context_only_creator.name,
entities=[intent_creator.entity_name],
),
),
SlotWasSet(
[
intent_creator.entity_name,
]
),
# Find the objects
ActionName(find_objects_action.ACTION_NAME),
# TODO: Call a common buy action, it checks 'purchasability' and redirects to the appropriate trigger action
ActionName("action_buy_object"),
]
),
# # No entities found
# Story(
# elements=[
# Intent(name=intent_creator.name,),
# ActionName("action_buy_object"),
# ]
# ),
]
# # Disambiguation
# intent_creator = common_intent_creators.intent_context_only_creator
# stories += [
# # Entities with number, objects found
# Story(
# elements=[
# Intent(
# name=intent_creator.name,
# entities=[intent_creator.entity_name, "number"],
# ),
# SlotWasSet([intent_creator.entity_name, "number"]),
# # Find the objects
# ActionName(disambiguation_action.ACTION_NAME),
# # Found
# SlotWasSet([find_objects_action.SLOT_FOUND_OBJECT_NAMES,]),
# ActionName("action_buy_object"),
# ]
# ),
# # Entities with number, no objects found
# Story(
# elements=[
# Intent(
# name=intent_creator.name,
# entities=[intent_creator.entity_name, "number"],
# ),
# SlotWasSet([intent_creator.entity_name, "number"]),
# # Find the objects
# ActionName(disambiguation_action.ACTION_NAME),
# # TODO: Call a common buy action, it checks 'purchasability' and redirects to the appropriate trigger action
# ActionName("action_buy_object"),
# ]
# ),
# # Entities, objects found
# Story(
# elements=[
# Intent(
# name=intent_creator.name,
# entities=[intent_creator.entity_name],
# ),
# SlotWasSet([intent_creator.entity_name,]),
# # Find the objects
# ActionName(disambiguation_action.ACTION_NAME),
# # Found
# SlotWasSet([find_objects_action.SLOT_FOUND_OBJECT_NAMES,]),
# ActionName("action_buy_object"),
# ]
# ),
# # Entities, no objects found
# Story(
# elements=[
# Intent(
# name=intent_creator.name,
# entities=[intent_creator.entity_name],
# ),
# SlotWasSet([intent_creator.entity_name,]),
# # Find the objects
# ActionName(disambiguation_action.ACTION_NAME),
# # TODO: Call a common buy action, it checks 'purchasability' and redirects to the appropriate trigger action
# ActionName("action_buy_object"),
# ]
# ),
# # No entities found
# Story(
# elements=[
# Intent(name=intent_creator.name,),
# ActionName("action_buy_object"),
# ]
# ),
# ]
# stories += [
# Story(
# elements=[
# utter_no_objects_found,
# ActionName(action_reset_slots_except_found_object_names.ACTION_NAME),
# ]
# ),
# Story(
# elements=[
# ActionName(get_object_info.ACTION_NAME),
# ActionName(action_reset_slots_except_found_object_names.ACTION_NAME),
# ]
# ),
# Story(
# elements=[
# ActionName(say_object_intros.ACTION_NAME),
# ActionName(action_reset_slots_except_found_object_names.ACTION_NAME),
# ]
# ),
# ]
| 33.491071
| 122
| 0.492535
| 1,201
| 15,004
| 5.80433
| 0.079933
| 0.113757
| 0.098121
| 0.118778
| 0.871037
| 0.865873
| 0.853823
| 0.838761
| 0.836178
| 0.826998
| 0
| 0
| 0.432551
| 15,004
| 447
| 123
| 33.565996
| 0.818769
| 0.360637
| 0
| 0.712062
| 0
| 0
| 0.02745
| 0.008937
| 0
| 0
| 0
| 0.002237
| 0
| 1
| 0
| false
| 0
| 0.023346
| 0
| 0.023346
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2219934cb267cd82f42b3f171d22dacc3b0b273a
| 8,992
|
py
|
Python
|
tests/test_detail.py
|
jwhitlock/drf-json-api
|
a62802432c612c34079f3c3694129f37778e2577
|
[
"MIT"
] | null | null | null |
tests/test_detail.py
|
jwhitlock/drf-json-api
|
a62802432c612c34079f3c3694129f37778e2577
|
[
"MIT"
] | null | null | null |
tests/test_detail.py
|
jwhitlock/drf-json-api
|
a62802432c612c34079f3c3694129f37778e2577
|
[
"MIT"
] | null | null | null |
from django.core.urlresolvers import reverse
from tests import models
from tests.utils import dump_json
import pytest
pytestmark = pytest.mark.django_db
def test_object(client):
    """GET of a person detail returns the serialized resource."""
    models.Person.objects.create(name="test")
    expected = {
        "people": {
            "id": "1",
            "href": "http://testserver/people/1/",
            "name": "test",
        }
    }
    url = reverse("person-detail", args=[1])
    assert client.get(url).content == dump_json(expected)
def test_object_with_optional_links(client):
    """Detail view includes href templates for nullable/empty relations."""
    models.Person.objects.create(name="test")
    expected = {
        "people": {
            "id": "1",
            "href": "http://testserver/people/1/",
            "name": "test",
            "links": {"favorite_post": None, "liked_comments": []},
        },
        "links": {
            "people.favorite_post": {
                "href": "http://testserver/posts/{people.favorite_post}/",
                "type": "posts",
            },
            "people.liked_comments": {
                "href": "http://testserver/comments/{people.liked_comments}/",
                "type": "comments",
            },
        },
    }
    url = reverse("people-full-detail", args=[1])
    assert client.get(url).content == dump_json(expected)
def test_update_attribute(client):
    """PATCH of a plain attribute is reflected in the response body."""
    models.Person.objects.create(name="test")
    payload = dump_json({"people": {"name": "new test"}})
    expected = {
        "people": {
            "id": "1",
            "href": "http://testserver/people/1/",
            "name": "new test",
            "links": {"favorite_post": None, "liked_comments": []},
        },
        "links": {
            "people.favorite_post": {
                "href": "http://testserver/posts/{people.favorite_post}/",
                "type": "posts",
            },
            "people.liked_comments": {
                "href": "http://testserver/comments/{people.liked_comments}/",
                "type": "comments",
            },
        },
    }
    response = client.patch(
        reverse("people-full-detail", args=[1]), payload,
        content_type="application/vnd.api+json")
    assert response.content == dump_json(expected)
def test_update_to_one_link(client):
    """PATCH of a to-one link (favorite_post) round-trips in the response."""
    models.Person.objects.create(name="test")
    author = models.Person.objects.create(name="author")
    post = models.Post.objects.create(title="The Post", author=author)
    payload = dump_json({
        "people": {"name": "test", "links": {"favorite_post": str(post.pk)}},
    })
    expected = {
        "people": {
            "id": "1",
            "href": "http://testserver/people/1/",
            "name": "test",
            "links": {"favorite_post": str(post.pk), "liked_comments": []},
        },
        "links": {
            "people.favorite_post": {
                "href": "http://testserver/posts/{people.favorite_post}/",
                "type": "posts",
            },
            "people.liked_comments": {
                "href": "http://testserver/comments/{people.liked_comments}/",
                "type": "comments",
            },
        },
    }
    response = client.patch(
        reverse("people-full-detail", args=[1]), payload,
        content_type="application/vnd.api+json")
    assert response.content == dump_json(expected)
def test_update_to_many_link(client):
    """PUT of a to-many link (liked_comments) round-trips in the response."""
    models.Person.objects.create(name="test")
    author = models.Person.objects.create(name="author")
    post = models.Post.objects.create(title="The Post", author=author)
    comment1 = models.Comment.objects.create(body="Comment 1", post=post)
    comment2 = models.Comment.objects.create(body="Comment 2", post=post)
    liked = [str(comment1.pk), str(comment2.pk)]
    payload = dump_json({
        "people": {
            "name": "test",
            "links": {"favorite_post": None, "liked_comments": liked},
        },
    })
    expected = {
        "people": {
            "id": "1",
            "href": "http://testserver/people/1/",
            "name": "test",
            "links": {"favorite_post": None, "liked_comments": liked},
        },
        "links": {
            "people.favorite_post": {
                "href": "http://testserver/posts/{people.favorite_post}/",
                "type": "posts",
            },
            "people.liked_comments": {
                "href": "http://testserver/comments/{people.liked_comments}/",
                "type": "comments",
            },
        },
    }
    response = client.put(
        reverse("people-full-detail", args=[1]), payload,
        content_type="application/vnd.api+json")
    assert response.content == dump_json(expected)
def test_object_with_pk_links(client):
    """Detail view with pk-only serializers omits link href templates."""
    models.Person.objects.create(name="test")
    expected = {
        "people": {
            "id": "1",
            "href": "http://testserver/people/1/",
            "name": "test",
            "links": {"favorite_post": None, "liked_comments": []},
        },
        "links": {
            "people.favorite_post": {"type": "posts"},
            "people.liked_comments": {"type": "comments"},
        },
    }
    url = reverse("pk-people-full-detail", args=[1])
    assert client.get(url).content == dump_json(expected)
def test_update_pk_attribute(client):
    """PATCH of an attribute against the pk-linked serializer."""
    models.Person.objects.create(name="test")
    payload = dump_json({
        "people": {
            "name": "new test",
            "links": {"favorite_post": None, "liked_comments": []},
        },
    })
    expected = {
        "people": {
            "id": "1",
            "href": "http://testserver/people/1/",
            "name": "new test",
            "links": {"favorite_post": None, "liked_comments": []},
        },
        "links": {
            "people.favorite_post": {"type": "posts"},
            "people.liked_comments": {"type": "comments"},
        },
    }
    response = client.patch(
        reverse("pk-people-full-detail", args=[1]), payload,
        content_type="application/vnd.api+json")
    assert response.content == dump_json(expected)
def test_update_to_one_pk_link(client):
    """PUT of a to-one pk link round-trips in the response."""
    models.Person.objects.create(name="test")
    author = models.Person.objects.create(name="author")
    post = models.Post.objects.create(title="The Post", author=author)
    payload = dump_json({
        "people": {
            "name": "test",
            "links": {"favorite_post": str(post.pk), "liked_comments": []},
        },
    })
    expected = {
        "people": {
            "id": "1",
            "href": "http://testserver/people/1/",
            "name": "test",
            "links": {"favorite_post": str(post.pk), "liked_comments": []},
        },
        "links": {
            "people.favorite_post": {"type": "posts"},
            "people.liked_comments": {"type": "comments"},
        },
    }
    response = client.put(
        reverse("pk-people-full-detail", args=[1]), payload,
        content_type="application/vnd.api+json")
    assert response.content == dump_json(expected)
def test_update_to_many_pk_link(client):
    """PUT of a to-many pk link round-trips in the response."""
    models.Person.objects.create(name="test")
    author = models.Person.objects.create(name="author")
    post = models.Post.objects.create(title="The Post", author=author)
    comment1 = models.Comment.objects.create(body="Comment 1", post=post)
    comment2 = models.Comment.objects.create(body="Comment 2", post=post)
    liked = [str(comment1.pk), str(comment2.pk)]
    payload = dump_json({
        "people": {
            "name": "test",
            "links": {"favorite_post": None, "liked_comments": liked},
        },
    })
    expected = {
        "people": {
            "id": "1",
            "href": "http://testserver/people/1/",
            "name": "test",
            "links": {"favorite_post": None, "liked_comments": liked},
        },
        "links": {
            "people.favorite_post": {"type": "posts"},
            "people.liked_comments": {"type": "comments"},
        },
    }
    response = client.put(
        reverse("pk-people-full-detail", args=[1]), payload,
        content_type="application/vnd.api+json")
    assert response.content == dump_json(expected)
| 27.667692
| 78
| 0.48254
| 815
| 8,992
| 5.196319
| 0.08589
| 0.070838
| 0.072255
| 0.076741
| 0.956789
| 0.956789
| 0.956789
| 0.956316
| 0.956316
| 0.944746
| 0
| 0.007467
| 0.359542
| 8,992
| 324
| 79
| 27.753086
| 0.727904
| 0
| 0
| 0.72242
| 0
| 0
| 0.259675
| 0.044039
| 0
| 0
| 0
| 0
| 0.032028
| 1
| 0.032028
| false
| 0
| 0.014235
| 0
| 0.046263
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3f2df71e846f49f8838708b3699f9abb647794b4
| 16,515
|
py
|
Python
|
api_1.3/containerd/services/tasks/v1/tasks_pb2_grpc.py
|
Silvanoc/pycontainerd
|
7245ce623d978f65cd8a4cf0d685a3318640a305
|
[
"Apache-2.0"
] | null | null | null |
api_1.3/containerd/services/tasks/v1/tasks_pb2_grpc.py
|
Silvanoc/pycontainerd
|
7245ce623d978f65cd8a4cf0d685a3318640a305
|
[
"Apache-2.0"
] | null | null | null |
api_1.3/containerd/services/tasks/v1/tasks_pb2_grpc.py
|
Silvanoc/pycontainerd
|
7245ce623d978f65cd8a4cf0d685a3318640a305
|
[
"Apache-2.0"
] | null | null | null |
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from containerd.services.tasks.v1 import tasks_pb2 as containerd_dot_services_dot_tasks_dot_v1_dot_tasks__pb2
from containerd.vendor.google.protobuf import empty_pb2 as containerd_dot_vendor_dot_google_dot_protobuf_dot_empty__pb2
class TasksStub(object):
# missing associated documentation comment in .proto file
pass
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.Create = channel.unary_unary(
'/containerd.services.tasks.v1.Tasks/Create',
request_serializer=containerd_dot_services_dot_tasks_dot_v1_dot_tasks__pb2.CreateTaskRequest.SerializeToString,
response_deserializer=containerd_dot_services_dot_tasks_dot_v1_dot_tasks__pb2.CreateTaskResponse.FromString,
)
self.Start = channel.unary_unary(
'/containerd.services.tasks.v1.Tasks/Start',
request_serializer=containerd_dot_services_dot_tasks_dot_v1_dot_tasks__pb2.StartRequest.SerializeToString,
response_deserializer=containerd_dot_services_dot_tasks_dot_v1_dot_tasks__pb2.StartResponse.FromString,
)
self.Delete = channel.unary_unary(
'/containerd.services.tasks.v1.Tasks/Delete',
request_serializer=containerd_dot_services_dot_tasks_dot_v1_dot_tasks__pb2.DeleteTaskRequest.SerializeToString,
response_deserializer=containerd_dot_services_dot_tasks_dot_v1_dot_tasks__pb2.DeleteResponse.FromString,
)
self.DeleteProcess = channel.unary_unary(
'/containerd.services.tasks.v1.Tasks/DeleteProcess',
request_serializer=containerd_dot_services_dot_tasks_dot_v1_dot_tasks__pb2.DeleteProcessRequest.SerializeToString,
response_deserializer=containerd_dot_services_dot_tasks_dot_v1_dot_tasks__pb2.DeleteResponse.FromString,
)
self.Get = channel.unary_unary(
'/containerd.services.tasks.v1.Tasks/Get',
request_serializer=containerd_dot_services_dot_tasks_dot_v1_dot_tasks__pb2.GetRequest.SerializeToString,
response_deserializer=containerd_dot_services_dot_tasks_dot_v1_dot_tasks__pb2.GetResponse.FromString,
)
self.List = channel.unary_unary(
'/containerd.services.tasks.v1.Tasks/List',
request_serializer=containerd_dot_services_dot_tasks_dot_v1_dot_tasks__pb2.ListTasksRequest.SerializeToString,
response_deserializer=containerd_dot_services_dot_tasks_dot_v1_dot_tasks__pb2.ListTasksResponse.FromString,
)
self.Kill = channel.unary_unary(
'/containerd.services.tasks.v1.Tasks/Kill',
request_serializer=containerd_dot_services_dot_tasks_dot_v1_dot_tasks__pb2.KillRequest.SerializeToString,
response_deserializer=containerd_dot_vendor_dot_google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.Exec = channel.unary_unary(
'/containerd.services.tasks.v1.Tasks/Exec',
request_serializer=containerd_dot_services_dot_tasks_dot_v1_dot_tasks__pb2.ExecProcessRequest.SerializeToString,
response_deserializer=containerd_dot_vendor_dot_google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.ResizePty = channel.unary_unary(
'/containerd.services.tasks.v1.Tasks/ResizePty',
request_serializer=containerd_dot_services_dot_tasks_dot_v1_dot_tasks__pb2.ResizePtyRequest.SerializeToString,
response_deserializer=containerd_dot_vendor_dot_google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.CloseIO = channel.unary_unary(
'/containerd.services.tasks.v1.Tasks/CloseIO',
request_serializer=containerd_dot_services_dot_tasks_dot_v1_dot_tasks__pb2.CloseIORequest.SerializeToString,
response_deserializer=containerd_dot_vendor_dot_google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.Pause = channel.unary_unary(
'/containerd.services.tasks.v1.Tasks/Pause',
request_serializer=containerd_dot_services_dot_tasks_dot_v1_dot_tasks__pb2.PauseTaskRequest.SerializeToString,
response_deserializer=containerd_dot_vendor_dot_google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.Resume = channel.unary_unary(
'/containerd.services.tasks.v1.Tasks/Resume',
request_serializer=containerd_dot_services_dot_tasks_dot_v1_dot_tasks__pb2.ResumeTaskRequest.SerializeToString,
response_deserializer=containerd_dot_vendor_dot_google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.ListPids = channel.unary_unary(
'/containerd.services.tasks.v1.Tasks/ListPids',
request_serializer=containerd_dot_services_dot_tasks_dot_v1_dot_tasks__pb2.ListPidsRequest.SerializeToString,
response_deserializer=containerd_dot_services_dot_tasks_dot_v1_dot_tasks__pb2.ListPidsResponse.FromString,
)
self.Checkpoint = channel.unary_unary(
'/containerd.services.tasks.v1.Tasks/Checkpoint',
request_serializer=containerd_dot_services_dot_tasks_dot_v1_dot_tasks__pb2.CheckpointTaskRequest.SerializeToString,
response_deserializer=containerd_dot_services_dot_tasks_dot_v1_dot_tasks__pb2.CheckpointTaskResponse.FromString,
)
self.Update = channel.unary_unary(
'/containerd.services.tasks.v1.Tasks/Update',
request_serializer=containerd_dot_services_dot_tasks_dot_v1_dot_tasks__pb2.UpdateTaskRequest.SerializeToString,
response_deserializer=containerd_dot_vendor_dot_google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.Metrics = channel.unary_unary(
'/containerd.services.tasks.v1.Tasks/Metrics',
request_serializer=containerd_dot_services_dot_tasks_dot_v1_dot_tasks__pb2.MetricsRequest.SerializeToString,
response_deserializer=containerd_dot_services_dot_tasks_dot_v1_dot_tasks__pb2.MetricsResponse.FromString,
)
self.Wait = channel.unary_unary(
'/containerd.services.tasks.v1.Tasks/Wait',
request_serializer=containerd_dot_services_dot_tasks_dot_v1_dot_tasks__pb2.WaitRequest.SerializeToString,
response_deserializer=containerd_dot_services_dot_tasks_dot_v1_dot_tasks__pb2.WaitResponse.FromString,
)
class TasksServicer(object):
  """Base servicer for the containerd.services.tasks.v1.Tasks service.

  Every handler below is an unimplemented stub: it marks the RPC as
  UNIMPLEMENTED on the gRPC context and then raises.  Concrete services
  subclass this class and override the RPCs they actually serve.
  """

  def _unimplemented(self, context):
    # Common stub body shared by every RPC handler below.
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def Create(self, request, context):
    """Create a task."""
    self._unimplemented(context)

  def Start(self, request, context):
    """Start a process."""
    self._unimplemented(context)

  def Delete(self, request, context):
    """Delete a task and on disk state."""
    self._unimplemented(context)

  def DeleteProcess(self, request, context):
    self._unimplemented(context)

  def Get(self, request, context):
    self._unimplemented(context)

  def List(self, request, context):
    self._unimplemented(context)

  def Kill(self, request, context):
    """Kill a task or process."""
    self._unimplemented(context)

  def Exec(self, request, context):
    self._unimplemented(context)

  def ResizePty(self, request, context):
    self._unimplemented(context)

  def CloseIO(self, request, context):
    self._unimplemented(context)

  def Pause(self, request, context):
    self._unimplemented(context)

  def Resume(self, request, context):
    self._unimplemented(context)

  def ListPids(self, request, context):
    self._unimplemented(context)

  def Checkpoint(self, request, context):
    self._unimplemented(context)

  def Update(self, request, context):
    self._unimplemented(context)

  def Metrics(self, request, context):
    self._unimplemented(context)

  def Wait(self, request, context):
    self._unimplemented(context)
def add_TasksServicer_to_server(servicer, server):
  """Register every Tasks RPC handler from `servicer` on `server`.

  All RPCs of containerd.services.tasks.v1.Tasks are unary-unary, so the
  handler map is built from a (method name, request class, response class)
  table instead of seventeen hand-written entries.
  """
  tasks = containerd_dot_services_dot_tasks_dot_v1_dot_tasks__pb2
  empty = containerd_dot_vendor_dot_google_dot_protobuf_dot_empty__pb2
  method_table = [
      ('Create', tasks.CreateTaskRequest, tasks.CreateTaskResponse),
      ('Start', tasks.StartRequest, tasks.StartResponse),
      ('Delete', tasks.DeleteTaskRequest, tasks.DeleteResponse),
      ('DeleteProcess', tasks.DeleteProcessRequest, tasks.DeleteResponse),
      ('Get', tasks.GetRequest, tasks.GetResponse),
      ('List', tasks.ListTasksRequest, tasks.ListTasksResponse),
      ('Kill', tasks.KillRequest, empty.Empty),
      ('Exec', tasks.ExecProcessRequest, empty.Empty),
      ('ResizePty', tasks.ResizePtyRequest, empty.Empty),
      ('CloseIO', tasks.CloseIORequest, empty.Empty),
      ('Pause', tasks.PauseTaskRequest, empty.Empty),
      ('Resume', tasks.ResumeTaskRequest, empty.Empty),
      ('ListPids', tasks.ListPidsRequest, tasks.ListPidsResponse),
      ('Checkpoint', tasks.CheckpointTaskRequest, tasks.CheckpointTaskResponse),
      ('Update', tasks.UpdateTaskRequest, empty.Empty),
      ('Metrics', tasks.MetricsRequest, tasks.MetricsResponse),
      ('Wait', tasks.WaitRequest, tasks.WaitResponse),
  ]
  rpc_method_handlers = {
      name: grpc.unary_unary_rpc_method_handler(
          getattr(servicer, name),
          request_deserializer=request_cls.FromString,
          response_serializer=response_cls.SerializeToString,
      )
      for name, request_cls, response_cls in method_table
  }
  generic_handler = grpc.method_handlers_generic_handler(
      'containerd.services.tasks.v1.Tasks', rpc_method_handlers)
  server.add_generic_rpc_handlers((generic_handler,))
| 51.609375
| 127
| 0.786013
| 1,903
| 16,515
| 6.344193
| 0.062007
| 0.07289
| 0.095668
| 0.109335
| 0.891659
| 0.889174
| 0.889174
| 0.835666
| 0.769486
| 0.769486
| 0
| 0.010345
| 0.145444
| 16,515
| 319
| 128
| 51.77116
| 0.845107
| 0.064426
| 0
| 0.381132
| 1
| 0
| 0.106644
| 0.048906
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071698
| false
| 0.056604
| 0.011321
| 0
| 0.090566
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
3f44969aab11d0e9d6b9a2432a9ac5fb0782e852
| 7,180
|
py
|
Python
|
py_cf_new_py3/help_functions_py3.py
|
bopopescu/local_controller_3
|
e5d98ab60e1ccac7e0310abc55e83b671d4a643c
|
[
"MIT"
] | null | null | null |
py_cf_new_py3/help_functions_py3.py
|
bopopescu/local_controller_3
|
e5d98ab60e1ccac7e0310abc55e83b671d4a643c
|
[
"MIT"
] | null | null | null |
py_cf_new_py3/help_functions_py3.py
|
bopopescu/local_controller_3
|
e5d98ab60e1ccac7e0310abc55e83b671d4a643c
|
[
"MIT"
] | null | null | null |
class Help_Functions(object):
    """Convenience wrappers that append opcodes ("links") to a chain-flow.

    Every method translates its arguments into exactly one
    ``self.cf.insert_link(opcode, data_list)`` call; no other state is kept
    here beyond the chain-flow object passed to the constructor.
    """

    def __init__(self, cf):
        # cf: chain-flow object providing insert_link(opcode, data_list).
        self.cf = cf

    # -- return-code opcodes ------------------------------------------------
    def terminate(self):
        """Append a Terminate opcode."""
        self.cf.insert_link("Terminate", [])

    def halt(self):
        """Append a Halt opcode."""
        self.cf.insert_link("Halt", [])

    def reset(self):
        """Append a Reset opcode."""
        self.cf.insert_link("Reset", [])

    def chain_flow_reset(self):
        """Append a Reset_Chain_Flow opcode."""
        self.cf.insert_link("Reset_Chain_Flow", [])

    # -- chain manipulation ---------------------------------------------------
    def enable_chains(self, list_of_chains):
        """Enable every chain named in list_of_chains."""
        self.cf.insert_link("Enable_Chain", [list_of_chains])

    def disable_chains(self, list_of_chains):
        """Disable every chain named in list_of_chains."""
        self.cf.insert_link("Disable_Chain", [list_of_chains])

    def suspend_chains(self, list_of_chains):
        """Suspend every chain named in list_of_chains."""
        self.cf.insert_link("Suspend_Chain", [list_of_chains])

    def resume_chains(self, list_of_chains):
        """Resume every chain named in list_of_chains."""
        self.cf.insert_link("Resume_Chain", [list_of_chains])

    # -- debug ----------------------------------------------------------------
    def log(self, debug_message):
        """Append a Log opcode carrying debug_message."""
        self.cf.insert_link("Log", [debug_message])

    # -- one-time functions ---------------------------------------------------
    def one_step(self, function, *params):
        """Append a One_Step opcode: run function(*params) once."""
        self.cf.insert_link("One_Step", [function, *params])

    # -- events ---------------------------------------------------------------
    def send_event(self, event, event_data=""):
        """Append a Send_Event opcode."""
        self.cf.insert_link("Send_Event", [event, event_data])

    def check_event(self, event, function, *params):
        """Append a Check_Event opcode that acts on `event`."""
        self.cf.insert_link("Check_Event", [event, function, *params])

    # -- user code with full return-code flexibility --------------------------
    def code(self, function, *params):
        """Append a Code opcode wrapping a user function."""
        self.cf.insert_link("Code", [function, *params])

    # -- wait opcodes ----------------------------------------------------------
    def wait_tod(self, dow="*", hour="*", minute="*", second="*"):
        """Wait until the given time of day matches exactly."""
        self.cf.insert_link("Wait_Tod", [dow, hour, minute, second])

    def wait_tod_ge(self, dow="*", hour="*", minute="*", second="*"):
        """Wait until the time of day is >= the given time."""
        self.cf.insert_link("Wait_Tod_GE", [dow, hour, minute, second])

    def wait_tod_le(self, dow="*", hour="*", minute="*", second="*"):
        """Wait until the time of day is <= the given time."""
        self.cf.insert_link("Wait_Tod_LE", [dow, hour, minute, second])

    def wait_event_count(self, event="TIME_TICK", count=1):
        """Wait until `event` has occurred `count` times."""
        self.cf.insert_link("Wait_Event_Count", [event, count])

    def wait_function(self, function, *params):
        """Wait until function(*params) signals completion."""
        self.cf.insert_link("Wait_Fn", [function, *params])

    # -- verify opcodes, reset variant (last payload flag True) ---------------
    def verify_tod_reset(self, dow="*", hour="*", minute="*", second="*", reset_event=None, reset_data=None):
        self.cf.insert_link("Verify_Tod", [dow, hour, minute, second, [reset_event, reset_data], True])

    def verify_tod_ge_reset(self, dow="*", hour="*", minute="*", second="*", reset_event=None, reset_data=None):
        self.cf.insert_link("Verify_Tod_GE", [dow, hour, minute, second, [reset_event, reset_data], True])

    def verify_tod_le_reset(self, dow="*", hour="*", minute="*", second="*", reset_event=None, reset_data=None):
        self.cf.insert_link("Verify_Tod_LE", [dow, hour, minute, second, [reset_event, reset_data], True])

    def verify_not_event_count_reset(self, event="TIME_TICK", count=1, reset_event=None, reset_data=None):
        self.cf.insert_link("Verify_Not_Event_Count", [event, count, [reset_event, reset_data], True])

    def verify_function_reset(self, reset_event, reset_event_data, function, *params):
        self.cf.insert_link("Verify_Fn", [function, [reset_event, reset_event_data], True, *params])

    # -- verify opcodes, terminate variant (last payload flag False) ----------
    def verify_tod_terminate(self, dow="*", hour="*", minute="*", second="*", reset_event=None, reset_data=None):
        self.cf.insert_link("Verify_Tod", [dow, hour, minute, second, [reset_event, reset_data], False])

    def verify_tod_ge_terminate(self, dow="*", hour="*", minute="*", second="*", reset_event=None, reset_data=None):
        self.cf.insert_link("Verify_Tod_GE", [dow, hour, minute, second, [reset_event, reset_data], False])

    def verify_tod_le_terminate(self, dow="*", hour="*", minute="*", second="*", reset_event=None, reset_data=None):
        self.cf.insert_link("Verify_Tod_LE", [dow, hour, minute, second, [reset_event, reset_data], False])

    def verify_not_event_count_terminate(self, event="TIME_TICK", count=1, reset_event=None, reset_data=None):
        self.cf.insert_link("Verify_Not_Event_Count", [event, count, [reset_event, reset_data], False])

    def verify_function_terminate(self, reset_event, reset_event_data, function, *params):
        self.cf.insert_link("Verify_Fn", [function, [reset_event, reset_event_data], False, *params])

    # -- assert opcodes, reset variant -----------------------------------------
    def assert_tod_reset(self, dow="*", hour="*", minute="*", second="*", reset_event=None, reset_data=None):
        self.cf.insert_link("Assert_Tod", [dow, hour, minute, second, [reset_event, reset_data], True])

    def assert_tod_ge_reset(self, dow="*", hour="*", minute="*", second="*", reset_event=None, reset_data=None):
        self.cf.insert_link("Assert_Tod_GE", [dow, hour, minute, second, [reset_event, reset_data], True])

    def assert_tod_le_reset(self, dow="*", hour="*", minute="*", second="*", reset_event=None, reset_data=None):
        self.cf.insert_link("Assert_Tod_LE", [dow, hour, minute, second, [reset_event, reset_data], True])

    def assert_not_event_count_reset(self, event="TIME_TICK", count=1, reset_event=None, reset_data=None):
        self.cf.insert_link("Assert_Not_Event_Count", [event, count, [reset_event, reset_data], True])

    def assert_function_reset(self, reset_event, reset_event_data, function, *params):
        self.cf.insert_link("Assert_Fn", [function, [reset_event, reset_event_data], True, *params])

    # -- assert opcodes, terminate variant --------------------------------------
    def assert_tod_terminate(self, dow="*", hour="*", minute="*", second="*", reset_event=None, reset_data=None):
        self.cf.insert_link("Assert_Tod", [dow, hour, minute, second, [reset_event, reset_data], False])

    def assert_tod_ge_terminate(self, dow="*", hour="*", minute="*", second="*", reset_event=None, reset_data=None):
        self.cf.insert_link("Assert_Tod_GE", [dow, hour, minute, second, [reset_event, reset_data], False])

    def assert_tod_le_terminate(self, dow="*", hour="*", minute="*", second="*", reset_event=None, reset_data=None):
        self.cf.insert_link("Assert_Tod_LE", [dow, hour, minute, second, [reset_event, reset_data], False])

    def assert_not_event_count_terminate(self, event="TIME_TICK", count=1, reset_event=None, reset_data=None):
        self.cf.insert_link("Assert_Not_Event_Count", [event, count, [reset_event, reset_data], False])

    def assert_function_terminate(self, reset_event, reset_event_data, function, *params):
        self.cf.insert_link("Assert_Fn", [function, [reset_event, reset_event_data], False, *params])
# Library-only module: running it directly does nothing.
if __name__ == "__main__":
    pass
| 36.820513
| 117
| 0.668106
| 983
| 7,180
| 4.560529
| 0.070193
| 0.107071
| 0.101718
| 0.135623
| 0.82824
| 0.80058
| 0.788758
| 0.757082
| 0.757082
| 0.713362
| 0
| 0.000844
| 0.17493
| 7,180
| 195
| 118
| 36.820513
| 0.755908
| 0.042897
| 0
| 0.195876
| 0
| 0
| 0.081053
| 0.012875
| 0
| 0
| 0
| 0
| 0.206186
| 1
| 0.402062
| false
| 0.010309
| 0
| 0
| 0.412371
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3f51c84a12f3179deac7648df2f69daca2ca21f6
| 35,313
|
py
|
Python
|
interactive_plot.py
|
dullemond/interactive_plot
|
4d71087ba1d88bd53871e60abf633539fa22423c
|
[
"MIT"
] | 1
|
2020-11-27T13:43:26.000Z
|
2020-11-27T13:43:26.000Z
|
interactive_plot.py
|
dullemond/interactive_plot
|
4d71087ba1d88bd53871e60abf633539fa22423c
|
[
"MIT"
] | null | null | null |
interactive_plot.py
|
dullemond/interactive_plot
|
4d71087ba1d88bd53871e60abf633539fa22423c
|
[
"MIT"
] | 1
|
2020-08-03T11:37:24.000Z
|
2020-08-03T11:37:24.000Z
|
#
# Interactive plotting tool
#
# Copyright (c) 2018 C.P. Dullemond
# Free software under the standard MIT License
#
import numpy as np
def interactive_plot(x, func, params, ymin=None, ymax=None, parnames=None, parunits=None, \
                     fig=None, ax=None, axmodel=None, parstart=None, iparstart=None, \
                     plotbutton=False, fixedpar=None, returnipar=False, block=False, \
                     paramsalt=None, altformat='', img_x=None, img_y=None, img_func=None, \
                     img_im=None, parformats=None, **kwargs):
    """
    Plot the function func(x) with parameters given by the params
    list of lists.

    ARGUMENTS:
      x          Array of x values
      func       Function func(x,params)
      params     List of parameters, but with each parameter value
                 here given as a list of possible values.

    OPTIONAL ARGUMENTS:
      ymin       Set vertical axis lower limit
      ymax       Set vertical axis upper limit
      parnames   Names of the params, e.g. ['A', 'omega']
                 If the parnames have an '=' sign (e.g. ['A = ', 'omega = '])
                 then the value of the parameters are written out.
      parunits   If set, a list of values by which the parameter values are divided
                 before being printed on the widget (only if parnames have '=').
                 It only affects the printing next to the sliders, and has no
                 other effect.
      fig        A pre-existing figure
      ax         A pre-existing axis
      axmodel    If set, this is the plot style of the model
      parstart   If set, set the sliders initially close to these values
      iparstart  If set, set the slider index values initially to these values
                 (note: iparstart is an alternative to parstart)
      parformats If set, a list of format strings to use for displaying the parameter values
      paramsalt  If set, then instead of the params values, the paramsalt values
                 will be written after '=' (only if parnames is set, see above).
      plotbutton If True, replot only when the 'Plot' button is pressed
                 instead of on every slider change (useful for heavy models).
      fixedpar   If set, a dict of extra fixed parameters; func is then called
                 as func(x, par, fixedpar=fixedpar).
      img_x, img_y, img_func, img_im
                 If all set, drive an interactive image instead of (or in
                 addition to) a curve: img_im.set_data(img_x, img_y,
                 img_func(par).T) on every update (see EXAMPLE 5).
      returnipar If True, then return ipar
      block      If True, then wait until window is closed

    EXAMPLE 1 (Simplest example):
      from interactive_plot import *
      def func(x,param): return param[0]*np.sin(param[1]*x)
      x      = np.linspace(0,2*np.pi,100)
      params = [np.linspace(0.1,1.,30),np.linspace(1.,3.,30)] # Choices of parameter values
      interactive_plot(x, func, params, ymax=1., ymin=-1., parnames=['A = ','omega = '])

    EXAMPLE 1-a (With plotting button instead of automatic replot; useful for heavier models):
      from interactive_plot import *
      def func(x,param): return param[0]*np.sin(param[1]*x)
      x      = np.linspace(0,2*np.pi,100)
      params = [np.linspace(0.1,1.,30),np.linspace(1.,3.,30)] # Choices of parameter values
      interactive_plot(x, func, params, ymax=1., ymin=-1., parnames=['A = ','omega = '],plotbutton=True)

    EXAMPLE 1-b (Plotting the content of a pre-calculated 2-D array)
      from interactive_plot import *
      x       = np.linspace(0,2*np.pi,100)
      y_array = np.zeros((30,100))
      omega   = np.linspace(1,3.,30)
      for i in range(30): y_array[i,:] = np.sin(omega[i]*x)
      def func(x,param): return y_array[param[0],:]
      params  = [np.arange(30)] # Choices of parameter values
      interactive_plot(x, func, params)

    EXAMPLE 2 (Model fitting to data):
      import numpy as np
      import matplotlib.pyplot as plt
      from interactive_plot import *
      def func(x,param): return param[0]*np.sin(param[1]*x)
      x        = np.linspace(0,2*np.pi,100)
      data     = 0.5*np.sin(2.*x)*(1.0+0.6*np.random.normal(size=len(x)))
      fig      = plt.figure(1)
      ax       = plt.axes(xlim=(x.min(),x.max()),ylim=(-1.2,1.2))
      axd,     = ax.plot(x,data,'o',label='data')
      plt.xlabel('x [cm]')
      plt.ylabel('f [erg/s]')
      params   = [np.linspace(0.1,1.,30),np.linspace(1.,3.,30)] # Choices of parameter values
      parstart = [0.6,2.0]  # Initial guesses for parameters
      interactive_plot(x, func, params, parnames=['A = ','omega = '], fig=fig, ax=ax, label='model',parstart=parstart)
      ax.legend()
      plt.show()

    EXAMPLE 2-a (Model overplotting over an image):
      import numpy as np
      import matplotlib.pyplot as plt
      from matplotlib import cm
      from interactive_plot import *
      def func(x,param): return param[0]*np.sin(param[1]*x)
      x        = np.linspace(0,2*np.pi,100)
      image    = np.random.normal(size=(70,70))  # Make some image
      fig      = plt.figure(1)
      extent   = [x.min(),x.max(),-1.2,1.2]
      axd      = plt.imshow(image,extent=extent,cmap=cm.hot)
      ax       = plt.gca()
      plt.axis(extent)
      plt.xlabel('x [cm]')
      plt.ylabel('f [erg/s]')
      params   = [np.linspace(0.1,1.,30),np.linspace(1.,3.,30)] # Choices of parameter values
      parstart = [0.6,2.0]  # Initial guesses for parameters
      interactive_plot(x, func, params, parnames=['A = ','omega = '], fig=fig, ax=ax, label='model',parstart=parstart)
      ax.legend()
      plt.show()

    EXAMPLE 3 (Fitting two models simultaneously to data):
      import numpy as np
      import matplotlib.pyplot as plt
      from interactive_plot import *
      def func(x,param): return np.vstack((param[0]*np.sin(param[1]*x),param[0]*np.cos(param[1]*x)))
      x        = np.linspace(0,2*np.pi,100)
      data     = 0.5*np.sin(2.*x)*(1.0+0.6*np.random.normal(size=len(x)))
      fig      = plt.figure(1)
      ax       = plt.axes(xlim=(x.min(),x.max()),ylim=(-1.2,1.2))
      axd,     = ax.plot(x,data,'o',label='data')
      axm0,    = ax.plot(x,data,'--',label='sin')
      axm1,    = ax.plot(x,data,':',label='cos')
      axmodel  = [axm0,axm1]
      plt.xlabel('x [cm]')
      plt.ylabel('f [erg/s]')
      params   = [np.linspace(0.1,1.,30),np.linspace(1.,3.,30)]
      interactive_plot(x, func, params, parnames=['A = ','omega = '], fig=fig, ax=ax, axmodel=axmodel)
      ax.legend()
      plt.show()

    EXAMPLE 3-a (Fitting two models in two separate plots simultaneously):
      import numpy as np
      import matplotlib.pyplot as plt
      from interactive_plot import *
      def func(x,param): return np.vstack((param[0]*np.sin(param[1]*x),param[0]*np.cos(param[1]*x)))
      x         = np.linspace(0,2*np.pi,100)
      data      = 0.5*np.sin(2.*x)*(1.0+0.6*np.random.normal(size=len(x)))
      extent    = [x.min(),x.max(),-1.2,1.2]
      fig, axes = plt.subplots(ncols=2)
      axes[0].axis(extent)
      axes[1].axis(extent)
      axd0,     = axes[0].plot(x,data,'o',label='data')
      axm0,     = axes[0].plot(x,data,'--',label='sin')
      axd1,     = axes[1].plot(x,data,'o',label='data')
      axm1,     = axes[1].plot(x,data,':',label='cos')
      axmodel   = [axm0,axm1]
      params    = [np.linspace(0.1,1.,30),np.linspace(1.,3.,30)]
      interactive_plot(x, func, params, parnames=['A = ','omega = '], fig=fig, ax=0, axmodel=axmodel)
      plt.show()

    EXAMPLE 4: (passing additional fixed parameters to function):
      from interactive_plot import *
      def func(x,param,fixedpar={}): return param[0]*np.sin(param[1]*x)+fixedpar['offset']
      x      = np.linspace(0,2*np.pi,100)
      params = [np.linspace(0.1,1.,30),np.linspace(1.,3.,30)] # Choices of parameter values
      interactive_plot(x, func, params, ymax=1., ymin=-1., parnames=['A = ','omega = '],fixedpar={'offset':0.6})

    EXAMPLE 5: (Interactive image, e.g. 2D slice from a higher-dimensional data box)
      import numpy as np
      from interactive_plot import *
      from matplotlib import cm
      from matplotlib import colors
      import matplotlib.pyplot as plt
      from matplotlib.image import NonUniformImage
      x        = np.linspace(-1,1,20)
      y        = np.linspace(-1,1,30)
      z        = np.linspace(0,1,25)
      xx,yy,zz = np.meshgrid(x,y,z,indexing='ij')
      rr       = np.sqrt(xx**2+yy**2)
      f        = np.sin(xx*2*np.pi)*yy*(1-zz)+np.cos(2*np.pi*rr)*zz
      norm     = colors.Normalize(vmin=f.min(),vmax=f.max())
      cmap     = cm.hot
      fig,ax   = plt.subplots()
      im       = NonUniformImage(ax,interpolation='nearest',cmap=cmap,norm=norm)
      im.set_data(x,y,f[:,:,0].T)
      ax.images.append(im)
      ax.set_xlim((x[0]-0.5*(x[1]-x[0]),x[-1]+0.5*(x[-1]-x[-2])))
      ax.set_ylim((y[0]-0.5*(y[1]-y[0]),y[-1]+0.5*(y[-1]-y[-2])))
      cbar=fig.colorbar(cm.ScalarMappable(norm=norm,cmap=cmap), ax=ax)
      cbar.set_label(r'$T\;[\mathrm{K}]$')
      def img_func(param,fixedpar={}): return fixedpar['f'][:,:,param[0]]
      params   = [np.arange(25)] # Choices of parameter values
      fixedpar = {}
      fixedpar["f"]=f
      interactive_plot(None, None, params, fixedpar=fixedpar, \
                       img_x=x,img_y=y,img_func=img_func,img_im=im, \
                       fig=fig,ax=ax)
    """
    import matplotlib.pyplot as plt
    from matplotlib.widgets import Slider, Button
    # Compute spacing of plot, sliders and button
    hslider  = 0.03
    nslidrscl= 6
    if(len(params)>nslidrscl):
        # Shrink the sliders so they all fit below the plot
        hslider *= float(nslidrscl)/len(params)
    dyslider = hslider*(4./3.)
    xslider  = 0.3
    wslider  = 0.3
    hbutton  = 0.06
    wbutton  = 0.15
    xbutton  = 0.3
    dybutton = hbutton+0.01
    panelbot = 0.0
    controlh = panelbot + len(params)*dyslider
    if plotbutton: controlh += dybutton
    controltop = panelbot + controlh
    bmargin  = 0.15
    # generate figure
    if fig is None: fig = plt.figure()
    fig.subplots_adjust(top=0.95,bottom=controltop+bmargin)
    # Set the initial slider indices: from parstart (nearest available value)
    # or directly from iparstart
    indexinit = np.zeros(len(params),dtype=int)
    if parstart is not None:
        for i in range(len(params)):
            if parstart[i] in params[i]:
                idx = np.where(np.array(params[i])==parstart[i])[0]
                if len(idx)>0:
                    indexinit[i] = idx[0]
            else:
                # parstart[i] not an exact choice: pick the closest index on
                # the correct side, accounting for ascending/descending lists
                if params[i][-1]>params[i][0]:
                    idx = np.where(np.array(params[i])<parstart[i])[0]
                    if len(idx)>0:
                        indexinit[i] = idx[-1]
                else:
                    idx = np.where(np.array(params[i])>parstart[i])[0]
                    if len(idx)>0:
                        indexinit[i] = idx[0]
    if iparstart is not None:
        indexinit[:] = iparstart[:]
    # select first image
    par = []
    for i in range(len(params)):
        par.append(params[i][indexinit[i]])
    xmin = None
    xmax = None
    if x is not None:
        if xmin is None: xmin = x.min()
        if xmax is None: xmax = x.max()
    if func is not None:
        if fixedpar is not None:
            f = func(x,par,fixedpar=fixedpar)
        else:
            f = func(x,par)
        # set range
        if ymin is None: ymin = f.min()
        if ymax is None: ymax = f.max()
    if img_y is not None:
        if ymin is None: ymin = img_y.min()
        if ymax is None: ymax = img_y.max()
        if ymin>img_y.min(): ymin = img_y.min()
        if ymax<img_y.max(): ymax = img_y.max()
    if img_x is not None:
        if xmin is None: xmin = img_x.min()
        if xmax is None: xmax = img_x.max()
        if xmin>img_x.min(): xmin = img_x.min()
        if xmax<img_x.max(): xmax = img_x.max()
    assert (xmin is not None) or (xmax is not None) , 'Error: x undefined'
    # BUGFIX: was `ymay`, an undefined name, which raised NameError instead
    # of the intended AssertionError whenever ymin was still None here.
    assert (ymin is not None) or (ymax is not None) , 'Error: y undefined'
    # display function(s)
    if ax is None: ax = plt.axes(xlim=(xmin,xmax),ylim=(ymin,ymax))
    if axmodel is None:
        if func is not None:
            if len(f.shape)==1:
                # Normal case: a single model function
                axmodel, = ax.plot(x,f,**kwargs)
            else:
                # Special case: multiple model functions: f[imodel,:]
                assert len(f.shape)==2, 'Model returns array with more than 2 dimensions. No idea what to do.'
                axmodel = []
                for i in range(f.shape[0]):
                    axm, = ax.plot(x,f[i,:],**kwargs)
                    axmodel.append(axm)
    sliders = []
    for i in range(len(params)):
        # define slider
        axcolor = 'lightgoldenrodyellow'
        # NOTE(review): add_axes expects [left, bottom, width, height];
        # xslider+wslider as the width looks suspicious but is kept as-is —
        # confirm against the intended layout before changing.
        axs = fig.add_axes([xslider, controltop-i*dyslider, xslider+wslider, hslider], facecolor=axcolor)
        if parnames is not None:
            name = parnames[i]
        else:
            name = 'Parameter {0:d}'.format(i)
        slider = Slider(axs, name, 0, len(params[i]) - 1,
                        valinit=indexinit[i], valfmt='%i')
        sliders.append(slider)
    if plotbutton:
        axb     = fig.add_axes([xbutton, panelbot+0.2*hbutton, xbutton+wbutton, hbutton])
        pbutton = Button(axb,'Plot')
    else:
        pbutton = None
    class callbackplot(object):
        """Holds widget state and performs slider-driven replotting."""
        def __init__(self,x,func,params,sliders,pbutton=None,fixedpar=None,ipar=None, \
                     img_x=None, img_y=None, img_func=None, img_im=None):
            self.x        = x
            self.func     = func
            self.params   = params
            self.sliders  = sliders
            self.pbutton  = pbutton
            self.fixedpar = fixedpar
            self.parunits = parunits
            self.parformats= parformats
            self.paramsalt= paramsalt
            self.altformat= altformat
            self.img_x    = img_x
            self.img_y    = img_y
            self.img_func = img_func
            self.img_im   = img_im
            self.closed   = False
            if ipar is None:
                self.ipar = np.zeros(len(sliders),dtype=int)
            else:
                self.ipar = ipar
        def handle_close(self,event):
            self.closed   = True
        def myreadsliders(self):
            """Read slider positions into self.ipar; return the current
            parameter values and refresh the slider labels."""
            for isl in range(len(self.sliders)):
                ind = int(self.sliders[isl].val)
                self.ipar[isl]=ind
            par = []
            for i in range(len(self.ipar)):
                ip = self.ipar[i]
                value = self.params[i][ip]
                par.append(value)
                name = self.sliders[i].label.get_text()
                if '=' in name:
                    # Rewrite the label after '=' with the current value
                    namebase = name.split('=')[0]
                    if self.paramsalt is not None:
                        vls  = "{0:" + self.altformat + "}"
                        name = namebase + "= " + vls.format(self.paramsalt[i][ip])
                    else:
                        if self.parunits is not None:
                            valunit = self.parunits[i]
                        else:
                            valunit = 1.0
                        if self.parformats is not None:
                            fmt = self.parformats[i]
                        else:
                            fmt = '13.6e'
                        name = namebase + "= {0:"+fmt+"}"
                        name = name.format(value/valunit)
                self.sliders[i].label.set_text(name)
            return par
        def myreplot(self,par):
            """Recompute the model (and/or image) for `par` and redraw."""
            if self.x is not None and self.func is not None:
                x = self.x
                if self.fixedpar is not None:
                    f = self.func(x,par,fixedpar=self.fixedpar)
                else:
                    f = self.func(x,par)
                if len(f.shape)==1:
                    axmodel.set_data(x,f)
                else:
                    for i in range(f.shape[0]):
                        axmodel[i].set_data(x,f[i,:])
            if self.img_x is not None and self.img_y is not None and self.img_func is not None and self.img_im is not None:
                x = self.img_x
                y = self.img_y
                if self.fixedpar is not None:
                    z = self.img_func(par,fixedpar=self.fixedpar)
                else:
                    z = self.img_func(par)
                self.img_im.set_data(x,y,z.T)
            plt.draw()
        def mysupdate(self,event):
            # Slider callback: replot immediately unless a Plot button exists
            par = self.myreadsliders()
            if self.pbutton is None: self.myreplot(par)
        def mybupdate(self,event):
            # Button callback: always replot, with busy feedback on the label
            par = self.myreadsliders()
            if self.pbutton is not None: self.pbutton.label.set_text('Computing...')
            plt.pause(0.01)
            self.myreplot(par)
            if self.pbutton is not None: self.pbutton.label.set_text('Plot')
    mcb = callbackplot(x,func,params,sliders,pbutton=pbutton,fixedpar=fixedpar,ipar=indexinit, \
                       img_x=img_x,img_y=img_y,img_func=img_func,img_im=img_im)
    mcb.mybupdate(0)
    if plotbutton:
        pbutton.on_clicked(mcb.mybupdate)
    for s in sliders:
        s.on_changed(mcb.mysupdate)
    fig._mycallback = mcb
    if block:
        plt.show(block=True)
    if returnipar:
        return mcb.ipar
def interactive_curve(t, func, params, xmin=None, xmax=None, ymin=None, ymax=None, parnames=None, parunits=None, fig=None, ax=None, axmodel=None, parstart=None, iparstart=None, plotbutton=False, fixedpar=None, returnipar=False, block=False, **kwargs):
    """
    Plot the 2-D curve x,y = func(t) with parameters given by the params
    list of lists.

    ARGUMENTS:
      t          Array of t values
      func       Function func(x,params)
      params     List of parameters, but with each parameter value
                 here given as a list of possible values.

    OPTIONAL ARGUMENTS:
      xmin       Set horizontal axis lower limit
      xmax       Set horizontal axis upper limit
      ymin       Set vertical axis lower limit
      ymax       Set vertical axis upper limit
      parnames   Names of the params, e.g. ['A', 'omega']
                 If the parnames have an '=' sign (e.g. ['A = ', 'omega = '])
                 then the value of the parameters are written out.
      parunits   If set, a list of values by which the parameter values are divided
                 before being printed on the widget (only if parnames have '=').
                 It only affects the printing next to the sliders, and has no
                 other effect.
      fig        A pre-existing figure
      ax         A pre-existing axis
      parstart   If set, set the sliders initially close to these values
      iparstart  If set, set the slider index values initially to these values
                 (note: iparstart is an alternative to parstart)
      returnipar If True, then return ipar
      block      If True, then wait until window is closed

    EXAMPLE 1 (one ellipse):
      from interactive_plot import *
      def func(t,param):
          x = param[0]*np.cos(t)
          y = param[1]*np.sin(t)
          csw = np.cos(param[2])
          snw = np.sin(param[2])
          return csw*x-snw*y,snw*x+csw*y
      t = np.linspace(0,2*np.pi,100)
      params = [np.linspace(0.1,1.,30),np.linspace(0.1,1.,30),np.linspace(0.,np.pi,30)]
      interactive_curve(t, func, params, xmax=1., xmin=-1., ymax=1., ymin=-1., parnames=['Ax = ','Ay = ','omega = '],iparstart=[10,15,12])

    EXAMPLE 1-a (With plotting button instead of automatic replot; useful for heavier models):
      from interactive_plot import *
      def func(t,param):
          x = param[0]*np.cos(t)
          y = param[1]*np.sin(t)
          csw = np.cos(param[2])
          snw = np.sin(param[2])
          return csw*x-snw*y,snw*x+csw*y
      t = np.linspace(0,2*np.pi,100)
      params = [np.linspace(0.1,1.,30),np.linspace(0.1,1.,30),np.linspace(0.,np.pi,30)]
      interactive_curve(t, func, params, xmax=1., xmin=-1., ymax=1., ymin=-1., parnames=['Ax = ','Ay = ','omega = '],iparstart=[10,15,12],plotbutton=True)

    EXAMPLE 2 (two ellipses):
      import numpy as np
      import matplotlib.pyplot as plt
      from interactive_plot import *
      def func(t,param):
          x = param[0]*np.cos(t)
          y = param[1]*np.sin(t)
          csw = np.cos(param[2])
          snw = np.sin(param[2])
          return np.vstack((csw*x-snw*y,-csw*x-snw*y)),np.vstack((snw*x+csw*y,snw*x+csw*y))
      t = np.linspace(0,2*np.pi,100)
      params = [np.linspace(0.1,1.,30),np.linspace(0.1,1.,30),np.linspace(0.,np.pi,30)]
      fig = plt.figure(1)
      ax = plt.axes(xlim=(-1.2,1.2),ylim=(-1.2,1.2))
      x,y = func(t,[1.,1.,1.])
      axm0, = ax.plot(x[0,:],y[0,:],'--',label='left')
      axm1, = ax.plot(x[1,:],y[1,:],':',label='right')
      axmodel= [axm0,axm1]
      interactive_curve(t, func, params, xmax=1., xmin=-1., ymax=1., ymin=-1., parnames=['Ax = ','Ay = ','omega = '],iparstart=[10,15,12], fig=fig, ax=ax, axmodel=axmodel)

    EXAMPLE 3 (as example 2, but now each ellipse in its own panel):
      import numpy as np
      import matplotlib.pyplot as plt
      from interactive_plot import *
      def func(t,param):
          x = param[0]*np.cos(t)
          y = param[1]*np.sin(t)
          csw = np.cos(param[2])
          snw = np.sin(param[2])
          return np.vstack((csw*x-snw*y,-csw*x-snw*y)),np.vstack((snw*x+csw*y,snw*x+csw*y))
      t = np.linspace(0,2*np.pi,100)
      params = [np.linspace(0.1,1.,30),np.linspace(0.1,1.,30),np.linspace(0.,np.pi,30)]
      fig, axes = plt.subplots(nrows=2)
      axes[0].set_xlim((-1.2,1.2))
      axes[0].set_ylim((-1.2,1.2))
      axes[1].set_xlim((-1.2,1.2))
      axes[1].set_ylim((-0.8,0.8))
      x,y = func(t,[1.,1.,1.])
      axm0, = axes[0].plot(x[0,:],y[0,:],'--',label='left')
      axm1, = axes[1].plot(x[1,:],y[1,:],':',label='right')
      axmodel= [axm0,axm1]
      interactive_curve(t, func, params, xmax=1., xmin=-1., ymax=1., ymin=-1., parnames=['Ax = ','Ay = ','omega = '],iparstart=[10,15,12], fig=fig, ax=axes[0], axmodel=axmodel)
    """
    import matplotlib.pyplot as plt
    from matplotlib.widgets import Slider, Button, RadioButtons
    # Compute spacing of plot, sliders and button
    hslider = 0.03
    nslidrscl = 6
    if (len(params) > nslidrscl):
        # Shrink the sliders when there are more than nslidrscl of them,
        # so the control panel keeps the same total height.
        hslider *= float(nslidrscl) / len(params)
    dyslider = hslider * (4. / 3.)
    xslider = 0.3
    wslider = 0.3
    hbutton = 0.06
    wbutton = 0.15
    xbutton = 0.3
    dybutton = hbutton + 0.01
    panelbot = 0.0
    controlh = panelbot + len(params) * dyslider
    if plotbutton: controlh += dybutton
    controltop = panelbot + controlh
    bmargin = 0.15
    # generate figure
    if fig is None: fig = plt.figure()
    # Reserve the bottom of the figure for the slider/button panel
    fig.subplots_adjust(top=0.95, bottom=controltop + bmargin)
    # Set the initial slider indices
    indexinit = np.zeros(len(params), dtype=int)
    if parstart is not None:
        for i in range(len(params)):
            if parstart[i] in params[i]:
                # Exact match: take its index
                idx = np.where(np.array(params[i]) == parstart[i])[0]
                if len(idx) > 0:
                    indexinit[i] = idx[0]
            else:
                # No exact match: take the nearest value below (ascending
                # param list) or above (descending param list)
                if params[i][-1] > params[i][0]:
                    idx = np.where(np.array(params[i]) < parstart[i])[0]
                    if len(idx) > 0:
                        indexinit[i] = idx[-1]
                else:
                    idx = np.where(np.array(params[i]) > parstart[i])[0]
                    if len(idx) > 0:
                        indexinit[i] = idx[0]
    if iparstart is not None:
        # iparstart overrides parstart
        indexinit[:] = iparstart[:]
    # select first curve
    par = []
    for i in range(len(params)):
        par.append(params[i][indexinit[i]])
    if fixedpar is not None:
        x, y = func(t, par, fixedpar=fixedpar)
    else:
        x, y = func(t, par)
    # set range from the first curve unless given explicitly
    if xmin is None: xmin = x.min()
    if xmax is None: xmax = x.max()
    if ymin is None: ymin = y.min()
    if ymax is None: ymax = y.max()
    # display function
    if ax is None: ax = plt.axes(xlim=(xmin, xmax), ylim=(ymin, ymax))
    if axmodel is None:
        if len(x.shape) == 1:
            # Normal case: a single model function.
            # BUG FIX: this previously asserted len(x.shape)==1 inside the
            # len(x.shape)==1 branch (a tautology); the message shows the
            # intent was to verify that y is also 1-D.
            assert len(y.shape) == 1, 'Cannot have multiple y and single x'
            axmodel, = ax.plot(x, y, **kwargs)
        else:
            # Special case: multiple model functions: f[imodel,:]
            assert len(x.shape) == 2, 'Model returns array with more than 2 dimensions. No idea what to do.'
            assert len(y.shape) == 2, 'Cannot have multiple x and single y'
            axmodel = []
            for i in range(x.shape[0]):
                axm, = ax.plot(x[i, :], y[i, :], **kwargs)
                axmodel.append(axm)
    sliders = []
    for i in range(len(params)):
        # define slider, stacked top-down in the control panel
        axcolor = 'lightgoldenrodyellow'
        # NOTE(review): add_axes takes [left, bottom, width, height]; passing
        # xslider+wslider as the width looks like wslider was intended —
        # confirm before changing (left unchanged to preserve layout).
        axs = fig.add_axes([xslider, controltop - i * dyslider, xslider + wslider, hslider], facecolor=axcolor)
        if parnames is not None:
            name = parnames[i]
        else:
            name = 'Parameter {0:d}'.format(i)
        # The slider value is the *index* into params[i], not the value itself
        slider = Slider(axs, name, 0, len(params[i]) - 1,
                        valinit=indexinit[i], valfmt='%i')
        sliders.append(slider)
    if plotbutton:
        axb = fig.add_axes([xbutton, panelbot + 0.2 * hbutton, xbutton + wbutton, hbutton])
        pbutton = Button(axb, 'Plot')
    else:
        pbutton = None

    class callbackcurve(object):
        """Bundles the slider/button state and the replot callbacks."""
        def __init__(self, t, func, params, sliders, pbutton=None, fixedpar=None, ipar=None):
            self.t = t
            self.func = func
            self.params = params
            self.sliders = sliders
            self.pbutton = pbutton
            self.fixedpar = fixedpar
            self.parunits = parunits
            self.closed = False
            if ipar is None:
                self.ipar = np.zeros(len(sliders), dtype=int)
            else:
                self.ipar = ipar

        def handle_close(self, event):
            self.closed = True

        def myreadsliders(self):
            """Read the slider indices into self.ipar and return the parameter values."""
            for isl in range(len(self.sliders)):
                ind = int(self.sliders[isl].val)
                self.ipar[isl] = ind
            par = []
            for i in range(len(self.ipar)):
                ip = self.ipar[i]
                value = self.params[i][ip]
                par.append(value)
                name = self.sliders[i].label.get_text()
                if '=' in name:
                    # Rewrite the label so the current value is displayed
                    namebase = name.split('=')[0]
                    if self.parunits is not None:
                        valunit = self.parunits[i]
                    else:
                        valunit = 1.0
                    name = namebase + "= {0:13.6e}".format(value / valunit)
                    self.sliders[i].label.set_text(name)
            return par

        def myreplot(self, par):
            """Recompute the curve(s) for par and push the data into the line artists."""
            t = self.t
            if self.fixedpar is not None:
                x, y = self.func(t, par, fixedpar=self.fixedpar)
            else:
                x, y = self.func(t, par)
            if len(x.shape) == 1:
                axmodel.set_data(x, y)
            else:
                for i in range(x.shape[0]):
                    axmodel[i].set_data(x[i, :], y[i, :])
            plt.draw()

        def mysupdate(self, event):
            """Slider callback: replot immediately unless a Plot button is used."""
            par = self.myreadsliders()
            if self.pbutton is None: self.myreplot(par)

        def mybupdate(self, event):
            """Plot-button callback: always replot, showing a busy label meanwhile."""
            par = self.myreadsliders()
            if self.pbutton is not None: self.pbutton.label.set_text('Computing...')
            plt.pause(0.01)
            self.myreplot(par)
            if self.pbutton is not None: self.pbutton.label.set_text('Plot')

    mcb = callbackcurve(t, func, params, sliders, pbutton=pbutton, fixedpar=fixedpar, ipar=indexinit)
    mcb.mybupdate(0)  # draw the initial state once
    if plotbutton:
        pbutton.on_clicked(mcb.mybupdate)
    for s in sliders:
        s.on_changed(mcb.mysupdate)
    fig._mycallback = mcb  # keep a reference so the widgets are not garbage-collected
    if block:
        plt.show(block=True)
    if returnipar:
        return mcb.ipar
def interactive_anything(update, params, parnames=None, parunits=None, fig=None, ax=None, axmodel=None, parstart=None, iparstart=None, plotbutton=False, fixedpar=None, returnipar=False, block=False, **kwargs):
    """
    This is an even more general interactive plotting tool: Here you create
    your own plot and your own update function, and all that the tool does
    is call your update function whenever necessary, with the parameters
    given by the sliders. This gives you full flexibility to animate any
    matplotlib object (or set of objects) in any way you wish.

    ARGUMENTS:
      update     The update function update(params)
      params     List of parameters, but with each parameter value
                 here given as a list of possible values.

    OPTIONAL ARGUMENTS:
      parnames   Names of the params, e.g. ['A', 'omega']
                 If the parnames have an '=' sign (e.g. ['A = ', 'omega = '])
                 then the value of the parameters are written out.
      parunits   If set, a list of values by which the parameter values are divided
                 before being printed on the widget (only if parnames have '=').
                 It only affects the printing next to the sliders, and has no
                 other effect.
      fig        A pre-existing figure
      ax         A pre-existing axis
      axmodel    A list of objects to animate
      parstart   If set, set the sliders initially close to these values
      iparstart  If set, set the slider index values initially to these values
                 (note: iparstart is an alternative to parstart)
      returnipar If True, then return ipar
      block      If True, then wait until window is closed

    EXAMPLE 1 (two ellipses: Identical to EXAMPLE 2 of interactive_curve()):
      import numpy as np
      import matplotlib.pyplot as plt
      from interactive_plot_new import *
      def func(t,param):
          x = param[0]*np.cos(t)
          y = param[1]*np.sin(t)
          csw = np.cos(param[2])
          snw = np.sin(param[2])
          return np.vstack((csw*x-snw*y,-csw*x-snw*y)),np.vstack((snw*x+csw*y,snw*x+csw*y))
      def update(param,ax=None,axmodel=None,fixedpar={}):
          t = np.linspace(0,2*np.pi,100)
          x,y = fixedpar['func'](t,param)
          axmodel[0].set_data(x[0,:],y[0,:])
          axmodel[1].set_data(x[1,:],y[1,:])
          plt.draw()
      t = np.linspace(0,2*np.pi,100)
      params = [np.linspace(0.1,1.,30),np.linspace(0.1,1.,30),np.linspace(0.,np.pi,30)]
      fig = plt.figure(1)
      ax = plt.axes(xlim=(-1.2,1.2),ylim=(-1.2,1.2))
      x,y = func(t,[1.,1.,1.])
      axm0, = ax.plot(x[0,:],y[0,:],'--',label='left')
      axm1, = ax.plot(x[1,:],y[1,:],':',label='right')
      axmodel= [axm0,axm1]
      fixedpar = {'func':func}
      interactive_anything(update, params, parnames=['Ax = ','Ay = ','omega = '],iparstart=[10,15,12], fig=fig, ax=ax, axmodel=axmodel, fixedpar=fixedpar)
    """
    import matplotlib.pyplot as plt
    from matplotlib.widgets import Slider, Button, RadioButtons
    # Compute spacing of plot, sliders and button
    hslider = 0.03
    nslidrscl = 6
    if (len(params) > nslidrscl):
        # Shrink the sliders when there are more than nslidrscl of them,
        # so the control panel keeps the same total height.
        hslider *= float(nslidrscl) / len(params)
    dyslider = hslider * (4. / 3.)
    xslider = 0.3
    wslider = 0.3
    hbutton = 0.06
    wbutton = 0.15
    xbutton = 0.3
    dybutton = hbutton + 0.01
    panelbot = 0.0
    controlh = panelbot + len(params) * dyslider
    if plotbutton: controlh += dybutton
    controltop = panelbot + controlh
    bmargin = 0.15
    # Checks: unlike interactive_curve(), the caller MUST supply the figure,
    # axis and the list of artists the update() function animates.
    assert fig is not None, "Must set fig=..."
    assert ax is not None, "Must set ax=..."
    assert axmodel is not None, "Must set axmodel=..."
    assert type(axmodel) is list, "axmodel must be a list of axis objects"
    # Adjust: reserve the bottom of the figure for the slider/button panel
    fig.subplots_adjust(top=0.95, bottom=controltop + bmargin)
    # Set the initial slider indices
    indexinit = np.zeros(len(params), dtype=int)
    if parstart is not None:
        for i in range(len(params)):
            if parstart[i] in params[i]:
                # Exact match: take its index
                idx = np.where(np.array(params[i]) == parstart[i])[0]
                if len(idx) > 0:
                    indexinit[i] = idx[0]
            else:
                # No exact match: take the nearest value below (ascending
                # param list) or above (descending param list)
                if params[i][-1] > params[i][0]:
                    idx = np.where(np.array(params[i]) < parstart[i])[0]
                    if len(idx) > 0:
                        indexinit[i] = idx[-1]
                else:
                    idx = np.where(np.array(params[i]) > parstart[i])[0]
                    if len(idx) > 0:
                        indexinit[i] = idx[0]
    if iparstart is not None:
        # iparstart overrides parstart
        indexinit[:] = iparstart[:]
    # select first image: call the user's update() once with the initial values
    par = []
    for i in range(len(params)):
        par.append(params[i][indexinit[i]])
    if fixedpar is not None:
        update(par, ax=ax, axmodel=axmodel, fixedpar=fixedpar)
    else:
        update(par, ax=ax, axmodel=axmodel)
    sliders = []
    for i in range(len(params)):
        # define slider, stacked top-down in the control panel
        axcolor = 'lightgoldenrodyellow'
        axs = fig.add_axes([xslider, controltop - i * dyslider, xslider + wslider, hslider], facecolor=axcolor)
        if parnames is not None:
            name = parnames[i]
        else:
            name = 'Parameter {0:d}'.format(i)
        # The slider value is the *index* into params[i], not the value itself
        slider = Slider(axs, name, 0, len(params[i]) - 1,
                        valinit=indexinit[i], valfmt='%i')
        sliders.append(slider)
    if plotbutton:
        axb = fig.add_axes([xbutton, panelbot + 0.2 * hbutton, xbutton + wbutton, hbutton])
        pbutton = Button(axb, 'Plot')
    else:
        pbutton = None

    class callbackanything(object):
        # Bundles the slider/button state and the callbacks that forward
        # the current parameter values to the user's update() function.
        def __init__(self, update, params, sliders, pbutton=None, fixedpar=None, ipar=None, ax=None, axmodel=None):
            self.update = update
            self.params = params
            self.sliders = sliders
            self.pbutton = pbutton
            self.fixedpar = fixedpar
            self.parunits = parunits
            self.ax = ax
            self.axmodel = axmodel
            self.closed = False
            if ipar is None:
                self.ipar = np.zeros(len(sliders), dtype=int)
            else:
                self.ipar = ipar

        def handle_close(self, event):
            self.closed = True

        def myreadsliders(self):
            # Read the slider indices into self.ipar and return the parameter
            # values; also refresh any 'name = value' slider labels.
            for isl in range(len(self.sliders)):
                ind = int(self.sliders[isl].val)
                self.ipar[isl] = ind
            par = []
            for i in range(len(self.ipar)):
                ip = self.ipar[i]
                value = self.params[i][ip]
                par.append(value)
                name = self.sliders[i].label.get_text()
                if '=' in name:
                    namebase = name.split('=')[0]
                    if self.parunits is not None:
                        valunit = self.parunits[i]
                    else:
                        valunit = 1.0
                    name = namebase + "= {0:13.6e}".format(value / valunit)
                    self.sliders[i].label.set_text(name)
            return par

        def myreplot(self, par):
            # Delegate the actual redraw entirely to the user's update()
            if self.fixedpar is not None:
                self.update(par, ax=self.ax, axmodel=self.axmodel, fixedpar=self.fixedpar)
            else:
                self.update(par, ax=self.ax, axmodel=self.axmodel)

        def mysupdate(self, event):
            # Slider callback: replot immediately unless a Plot button is used
            par = self.myreadsliders()
            if self.pbutton is None: self.myreplot(par)

        def mybupdate(self, event):
            # Plot-button callback: always replot, showing a busy label meanwhile
            par = self.myreadsliders()
            if self.pbutton is not None: self.pbutton.label.set_text('Computing...')
            plt.pause(0.01)
            self.myreplot(par)
            if self.pbutton is not None: self.pbutton.label.set_text('Plot')

    mcb = callbackanything(update, params, sliders, pbutton=pbutton, fixedpar=fixedpar, ipar=indexinit, ax=ax, axmodel=axmodel)
    mcb.mybupdate(0)  # draw the initial state once
    if plotbutton:
        pbutton.on_clicked(mcb.mybupdate)
    for s in sliders:
        s.on_changed(mcb.mysupdate)
    fig._mycallback = mcb  # keep a reference so the widgets are not garbage-collected
    if block:
        plt.show(block=True)
    if returnipar:
        return mcb.ipar
| 40.357714
| 251
| 0.558973
| 4,964
| 35,313
| 3.946414
| 0.080983
| 0.023992
| 0.020674
| 0.011026
| 0.822665
| 0.785911
| 0.760949
| 0.744359
| 0.725319
| 0.706636
| 0
| 0.028702
| 0.307394
| 35,313
| 874
| 252
| 40.40389
| 0.772263
| 0.416985
| 0
| 0.794816
| 0
| 0
| 0.028308
| 0
| 0
| 0
| 0
| 0
| 0.021598
| 1
| 0.045356
| false
| 0
| 0.015119
| 0
| 0.079914
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3f706304b5a6553d1e58b67d2dda241c9a269937
| 1,665
|
py
|
Python
|
test/test_logging.py
|
cuongnguyen2190/pure-python-adb
|
f8424f349cb7a72967690a9279f117799c16e8a0
|
[
"MIT"
] | 382
|
2018-07-12T07:56:47.000Z
|
2022-03-31T08:12:09.000Z
|
test/test_logging.py
|
cuongnguyen2190/pure-python-adb
|
f8424f349cb7a72967690a9279f117799c16e8a0
|
[
"MIT"
] | 74
|
2018-10-10T15:23:24.000Z
|
2022-03-31T00:27:10.000Z
|
test/test_logging.py
|
cuongnguyen2190/pure-python-adb
|
f8424f349cb7a72967690a9279f117799c16e8a0
|
[
"MIT"
] | 84
|
2018-07-08T17:15:54.000Z
|
2022-03-31T08:12:12.000Z
|
from ppadb.utils.logger import AdbLogging
import logging
def test_without_logging(capsys):
    """An unconfigured ppadb logger must emit nothing at any level."""
    logger = AdbLogging.get_logger("ppadb.test")
    logger.addHandler(logging.StreamHandler())
    logger.info("INFO message")
    captured = capsys.readouterr()
    assert not captured.out
    assert not captured.err
    logger.warning("WARNING message")
    captured = capsys.readouterr()
    assert not captured.out
    assert not captured.err
    logger.debug("DEBUG message")
    # BUG FIX: re-capture after the debug call; previously the stale capture
    # from the warning call was re-asserted, so debug output was never checked.
    captured = capsys.readouterr()
    assert not captured.out
    assert not captured.err
def test_without_log_message_after_set_root_logger_level(capsys):
    """Raising only the ROOT logger level must not make the ppadb logger emit."""
    logging.basicConfig()
    logger = AdbLogging.get_logger("ppadb.test")
    logger.addHandler(logging.StreamHandler())
    logging.getLogger().setLevel(logging.DEBUG)
    logger.info("INFO message")
    captured = capsys.readouterr()
    assert not captured.out
    assert not captured.err
    logger.warning("WARNING message")
    captured = capsys.readouterr()
    assert not captured.out
    assert not captured.err
    logger.debug("DEBUG message")
    # BUG FIX: re-capture after the debug call; previously the stale capture
    # from the warning call was re-asserted, so debug output was never checked.
    captured = capsys.readouterr()
    assert not captured.out
    assert not captured.err
def test_enable_log_message(capsys):
    """Setting DEBUG on the 'ppadb' logger enables output (on stderr) at every level."""
    logging.basicConfig()
    logger = AdbLogging.get_logger("ppadb.test")
    logger.addHandler(logging.StreamHandler())
    logging.getLogger("ppadb").setLevel(logging.DEBUG)
    logger.info("INFO message")
    captured = capsys.readouterr()
    assert not captured.out
    assert captured.err
    logger.warning("WARNING message")
    captured = capsys.readouterr()
    assert not captured.out
    assert captured.err
    logger.debug("DEBUG message")
    # BUG FIX: re-capture after the debug call; previously the assertion
    # re-checked the warning call's (stale) capture, so the debug message
    # itself was never verified.
    captured = capsys.readouterr()
    assert not captured.out
    assert captured.err
| 26.428571
| 65
| 0.723724
| 198
| 1,665
| 6.005051
| 0.166667
| 0.113541
| 0.214466
| 0.151388
| 0.884777
| 0.884777
| 0.884777
| 0.875526
| 0.875526
| 0.875526
| 0
| 0
| 0.180781
| 1,665
| 62
| 66
| 26.854839
| 0.871701
| 0
| 0
| 0.854167
| 0
| 0
| 0.093149
| 0
| 0
| 0
| 0
| 0
| 0.375
| 1
| 0.0625
| false
| 0
| 0.041667
| 0
| 0.104167
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
58d68864bbac1804f10ff984858fa597f39c0409
| 164
|
py
|
Python
|
test_botty/tests/__init__.py
|
ColumbiaSC-Tech/botty_mcbotface
|
f72fbc7e899650b40b0cf0197c9e6006a5224669
|
[
"MIT"
] | 11
|
2017-09-16T21:00:52.000Z
|
2020-10-08T19:58:09.000Z
|
test_botty/tests/__init__.py
|
ColumbiaSC-Tech/botty_mcbotface
|
f72fbc7e899650b40b0cf0197c9e6006a5224669
|
[
"MIT"
] | 12
|
2018-10-30T23:52:34.000Z
|
2021-12-13T19:46:21.000Z
|
test_botty/tests/__init__.py
|
ColumbiaSC-Tech/botty_mcbotface
|
f72fbc7e899650b40b0cf0197c9e6006a5224669
|
[
"MIT"
] | 3
|
2017-09-15T13:22:37.000Z
|
2019-08-01T11:42:14.000Z
|
from botty_mcbotface.task_runner import stop_task_runner
def tearDownModule():
    """Module-level teardown hook: runs once after every test in this module
    has finished, stopping the background task runner."""
    stop_task_runner()
| 23.428571
| 59
| 0.768293
| 22
| 164
| 5.454545
| 0.772727
| 0.25
| 0.233333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152439
| 164
| 6
| 60
| 27.333333
| 0.863309
| 0.29878
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
58e71a4c4711c69542991ae7a9ce48c622b5ad8c
| 56,960
|
py
|
Python
|
WindFarmGenetic.py
|
JuXinglong/WFLOP_SUGGA_Python
|
27ae25a40be7fe2258894a741013aa46e9d1e4b3
|
[
"MIT"
] | 23
|
2019-12-21T04:14:11.000Z
|
2021-10-18T13:42:34.000Z
|
WindFarmGenetic.py
|
candleinwindsteve/WFLOP_SUGGA_Python
|
f9b0d0739d3cebfe194605b5696ba012841e3231
|
[
"MIT"
] | null | null | null |
WindFarmGenetic.py
|
candleinwindsteve/WFLOP_SUGGA_Python
|
f9b0d0739d3cebfe194605b5696ba012841e3231
|
[
"MIT"
] | 9
|
2019-07-20T17:52:35.000Z
|
2021-10-18T13:42:50.000Z
|
import math
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as patches
import time
from datetime import datetime
__version__ = "1.0.0"
class WindFarmGenetic:
    """Genetic-algorithm optimizer for wind-turbine placement on a rows x cols
    grid of cells, using a Jensen-style wake model for the fitness function."""
    # Class-level defaults; __init__ overwrites every one of these, so they
    # mainly document the attributes an instance is expected to carry.
    elite_rate = 0.2  # elite rate: parameter for genetic algorithm
    cross_rate = 0.6  # crossover rate: parameter for genetic algorithm
    random_rate = 0.5  # random rate: parameter for genetic algorithm
    mutate_rate = 0.1  # mutation rate: parameter for genetic algorithm
    turbine = None  # turbine model object (set to GE_1_5_sleTurbine() in __init__)
    pop_size = 0  # population size : how many individuals in a population
    N = 0  # number of wind turbines
    rows = 0  # how many cell rows the wind farm are divided into
    cols = 0  # how many colus the wind farm land are divided into
    iteration = 0  # how many iterations the genetic algorithm run
    NA_loc = None  # not available, not usable locations index list (the index starts from 1)
    cell_width = 0  # cell width
    cell_width_half = 0  # half cell width

    # constructor of the class
def __init__(self, rows=21, cols=21, N=0,NA_loc=None, pop_size=100, iteration=200,cell_width=0, elite_rate=0.2,
cross_rate=0.6, random_rate=0.5, mutate_rate=0.1):
self.turbine = GE_1_5_sleTurbine()
self.rows = rows
self.cols = cols
self.N = N
self.pop_size = pop_size
self.iteration = iteration
self.cell_width = cell_width
self.cell_width_half = cell_width * 0.5
self.elite_rate = elite_rate
self.cross_rate = cross_rate
self.random_rate = random_rate
self.mutate_rate = mutate_rate
self.init_pop = None
self.init_pop_NA = None
self.init_pop_nonezero_indices = None
self.NA_loc=NA_loc
return
# choose wind distribution : 1 direction, 1 speed
def init_1_direction_1_N_speed_13(self):
self.theta = np.array([0], dtype=np.float32)
self.velocity = np.array([13.0], dtype=np.float32)
self.f_theta_v = np.array([[1.0]], dtype=np.float32)
return
# choose wind distribution : 4 directions, 1 speed
def init_4_direction_1_speed_13(self):
self.theta = np.array(
[0, 3 * np.pi / 6.0, 6 * np.pi / 6.0, 9 * np.pi / 6.0], dtype=np.float32) # 1.0/4
self.velocity = np.array([13.0], dtype=np.float32) # 1
self.f_theta_v = np.array([[0.25], [0.25], [0.25], [0.25]], dtype=np.float32)
return
# choose wind distribution : 6 direction, 1 speed
def init_6_direction_1_speed_13(self):
self.theta = np.array([0, np.pi / 3.0, 2 * np.pi / 3.0, 3 * np.pi / 3.0, 4 * np.pi / 3.0, 5 * np.pi / 3.0],
dtype=np.float32) # 0.2, 0,3 0.2 0. 1 0.1 0.1
self.velocity = np.array([13.0], dtype=np.float32) # 1
self.f_theta_v = np.array([[0.2], [0.3], [0.2], [0.1], [0.1], [0.1]], dtype=np.float32)
return
# the total cost
def cost(self, N):
return 1.0 * N * (2.0 / 3.0 + 1.0 / 3.0 * math.exp(-0.00174 * N ** 2))
# generate initial population
def gen_init_pop_NA(self):
self.init_pop,self.init_pop_NA = LayoutGridMCGenerator.gen_pop_with_NA_loc(rows=self.rows, cols=self.cols,NA_loc=self.NA_loc, n=self.pop_size, N=self.N)
self.init_pop_nonezero_indices = np.zeros((self.pop_size, self.N), dtype=np.int32)
for ind_init_pop in range(self.pop_size):
ind_indices = 0
for ind in range(self.rows * self.cols):
if self.init_pop[ind_init_pop, ind] == 1:
self.init_pop_nonezero_indices[ind_init_pop, ind_indices] = ind
ind_indices += 1
return
# save initial population
def save_init_pop_NA(self, fname,fname_NA):
np.savetxt(fname, self.init_pop, fmt='%d', delimiter=" ")
np.savetxt(fname_NA, self.init_pop_NA, fmt='%d', delimiter=" ")
return
# load initial population
def load_init_pop_NA(self, fname,fname_NA):
self.init_pop = np.genfromtxt(fname, delimiter=" ", dtype=np.int32)
self.init_pop_NA = np.genfromtxt(fname_NA, delimiter=" ", dtype=np.int32)
self.init_pop_nonezero_indices = np.zeros((self.pop_size, self.N), dtype=np.int32)
for ind_init_pop in range(self.pop_size):
ind_indices = 0
for ind in range(self.rows * self.cols):
if self.init_pop[ind_init_pop, ind] == 1:
self.init_pop_nonezero_indices[ind_init_pop, ind_indices] = ind
ind_indices += 1
return
# calculate total rate power
def cal_P_rate_total(self):
f_p = 0.0
for ind_t in range(len(self.theta)):
for ind_v in range(len(self.velocity)):
f_p += self.f_theta_v[ind_t, ind_v] * self.turbine.P_i_X(self.velocity[ind_v])
return self.N * f_p
    # generate the location index coordinate and average power output at each location index coordinate
    # location index coordinate : in the cells, the cell with index 1 has location index (0,0) and the cell 2 has (1,0)
    # store the location index coordinate in x.dat and average power in y.dat
    def mc_gen_xy_NA(self, rows, cols, layouts, n, N, xfname, yfname):
        """For a set of n Monte-Carlo layouts, compute the average power a
        turbine produces in each grid cell (over the layouts that occupy that
        cell) and write cell (col,row) coordinates to xfname and the average
        powers to yfname."""
        layouts_cr = np.zeros((rows * cols, 2), dtype=np.int32)  # layouts column row index
        n_copies = np.sum(layouts, axis=0)  # how many layouts place a turbine in each cell
        layouts_power = np.zeros((n, rows * cols), dtype=np.float32)
        # per-layout, per-cell power (filled in place by mc_fitness)
        self.mc_fitness(pop=layouts, rows=rows, cols=cols, pop_size=n, N=N, lp=layouts_power)
        sum_layout_power = np.sum(layouts_power, axis=0)
        mean_power = np.zeros(rows * cols, dtype=np.float32)
        for i in range(rows * cols):
            # cells never occupied keep mean_power 0 (avoids divide-by-zero)
            if n_copies[i] > 0:
                mean_power[i] = sum_layout_power[i] / n_copies[i]
        for ind in range(rows * cols):
            # convert flat cell index to (column, row)
            r_i = np.floor(ind / cols)
            c_i = np.floor(ind - r_i * cols)
            layouts_cr[ind, 0] = c_i
            layouts_cr[ind, 1] = r_i
        np.savetxt(xfname, layouts_cr, fmt='%d', delimiter=" ")
        np.savetxt(yfname, mean_power, fmt='%f', delimiter=" ")
        return
    # calculate fitness value of the population
    def mc_fitness(self, pop, rows, cols, pop_size, N, lp):
        """For each layout in pop, compute the wake-discounted, wind-rose-weighted
        power of each turbine and scatter it into lp[i, cell_index] (in place)."""
        for i in range(pop_size):
            print("layout {}...".format(i))
            xy_position = np.zeros((2, N), dtype=np.float32)  # x y position
            cr_position = np.zeros((2, N), dtype=np.int32)  # column row position
            ind_position = np.zeros(N, dtype=np.int32)  # flat cell index of each turbine
            ind_pos = 0
            for ind in range(rows * cols):
                if pop[i, ind] == 1:
                    # cell centre coordinates of this turbine
                    r_i = np.floor(ind / cols)
                    c_i = np.floor(ind - r_i * cols)
                    cr_position[0, ind_pos] = c_i
                    cr_position[1, ind_pos] = r_i
                    xy_position[0, ind_pos] = c_i * self.cell_width + self.cell_width_half
                    xy_position[1, ind_pos] = r_i * self.cell_width + self.cell_width_half
                    ind_position[ind_pos] = ind
                    ind_pos += 1
            lp_power_accum = np.zeros(N, dtype=np.float32)  # a specific layout power accumulate
            for ind_t in range(len(self.theta)):
                for ind_v in range(len(self.velocity)):
                    # rotate coordinates so the wind blows along -y for this theta
                    trans_matrix = np.array(
                        [[np.cos(self.theta[ind_t]), -np.sin(self.theta[ind_t])],
                         [np.sin(self.theta[ind_t]), np.cos(self.theta[ind_t])]],
                        np.float32)
                    trans_xy_position = np.matmul(trans_matrix, xy_position)
                    speed_deficiency = self.wake_calculate(trans_xy_position, N)
                    actual_velocity = (1 - speed_deficiency) * self.velocity[ind_v]
                    lp_power = self.layout_power(actual_velocity,
                                                 N)  # total power of a specific layout specific wind speed specific theta
                    # weight by the frequency of this (theta, v) wind bin
                    lp_power = lp_power * self.f_theta_v[ind_t, ind_v]
                    lp_power_accum += lp_power
            # scatter per-turbine powers back to their cells
            lp[i, ind_position] = lp_power_accum
        return
# calculate wake effect
def wake_calculate(self, trans_xy_position, N):
sorted_index = np.argsort(-trans_xy_position[1, :]) # y value descending
wake_deficiency = np.zeros(N, dtype=np.float32)
wake_deficiency[sorted_index[0]] = 0
for i in range(1, N):
for j in range(i):
xdis = np.absolute(trans_xy_position[0, sorted_index[i]] - trans_xy_position[0, sorted_index[j]])
ydis = np.absolute(trans_xy_position[1, sorted_index[i]] - trans_xy_position[1, sorted_index[j]])
d = self.cal_deficiency(dx=xdis, dy=ydis, r=self.turbine.rator_radius,
ec=self.turbine.entrainment_const)
wake_deficiency[sorted_index[i]] += d ** 2
wake_deficiency[sorted_index[i]] = np.sqrt(wake_deficiency[sorted_index[i]])
return wake_deficiency
# ec : entrainment_const
def cal_deficiency(self, dx, dy, r, ec):
if dy == 0:
return 0
R = r + ec * dy
inter_area = self.cal_interaction_area(dx=dx, dy=dy, r=r, R=R)
d = 2.0 / 3.0 * (r ** 2) / (R ** 2) * inter_area / (np.pi * r ** 2)
return d
#calculate ineraction area
def cal_interaction_area(self, dx, dy, r, R):
if dx >= r + R:
return 0
elif dx >= np.sqrt(R ** 2 - r ** 2):
alpha = np.arccos((R ** 2 + dx ** 2 - r ** 2) / (2 * R * dx))
beta = np.arccos((r ** 2 + dx ** 2 - R ** 2) / (2 * r * dx))
A1 = alpha * R ** 2
A2 = beta * r ** 2
A3 = R * dx * np.sin(alpha)
return A1 + A2 - A3
elif dx >= R - r:
alpha = np.arccos((R ** 2 + dx ** 2 - r ** 2) / (2 * R * dx))
beta = np.pi - np.arccos((r ** 2 + dx ** 2 - R ** 2) / (2 * r * dx))
A1 = alpha * R ** 2
A2 = beta * r ** 2
A3 = R * dx * np.sin(alpha)
return np.pi * r ** 2 - (A2 + A3 - A1)
else:
return np.pi * r ** 2
def layout_power(self, velocity, N):
power = np.zeros(N, dtype=np.float32)
for i in range(N):
power[i] = self.turbine.P_i_X(velocity[i])
return power
    # conventional genetic algorithm
    def conventional_genetic_alg(self, ind_time=0, result_folder=None):  # conventional genetic algorithm
        """Run the conventional GA for self.iteration generations starting from
        the pre-generated initial population, write per-generation efficiency
        and best layouts into result_folder, and return
        (wall-clock seconds, final efficiency eta)."""
        P_rate_total = self.cal_P_rate_total()  # ideal farm power, used to normalize fitness into eta
        start_time = datetime.now()
        print("conventional genetic algorithm starts....")
        fitness_generations = np.zeros(self.iteration, dtype=np.float32)  # best fitness value in each generation
        best_layout_generations = np.zeros((self.iteration, self.rows * self.cols),
                                           dtype=np.int32)  # best layout in each generation
        best_layout_NA_generations = np.zeros((self.iteration, self.rows * self.cols),
                                              dtype=np.int32)  # best layout in each generation
        power_order = np.zeros((self.pop_size, self.N),
                               dtype=np.int32)  # each row is a layout cell indices. in each layout, order turbine power from least to largest
        # work on copies so the cached initial population stays reusable
        pop = np.copy(self.init_pop)
        pop_NA = np.copy(self.init_pop_NA)
        pop_indices = np.copy(self.init_pop_nonezero_indices)  # each row is a layout cell indices.
        eN = int(np.floor(self.pop_size * self.elite_rate))  # elite number
        rN = int(int(np.floor(self.pop_size * self.mutate_rate)) / eN) * eN  # reproduce number
        mN = rN  # mutation number
        cN = self.pop_size - eN - mN  # crossover number
        # NOTE(review): eN/rN/mN/cN are not referenced later in this method —
        # bookkeeping only, as far as the visible code shows.
        for gen in range(self.iteration):
            print("generation {}...".format(gen))
            fitness_value = self.conventional_fitness(pop=pop, rows=self.rows, cols=self.cols, pop_size=self.pop_size,
                                                      N=self.N,
                                                      po=power_order)
            sorted_index = np.argsort(-fitness_value)  # fitness value descending from largest to least
            pop = pop[sorted_index, :]
            pop_NA = pop_NA[sorted_index, :]
            power_order = power_order[sorted_index, :]
            pop_indices = pop_indices[sorted_index, :]
            # track the best-so-far fitness/layout (monotone non-decreasing)
            if gen == 0:
                fitness_generations[gen] = fitness_value[sorted_index[0]]
                best_layout_generations[gen, :] = pop[0, :]
                best_layout_NA_generations[gen, :] = pop_NA[0, :]
            else:
                if fitness_value[sorted_index[0]] > fitness_generations[gen - 1]:
                    fitness_generations[gen] = fitness_value[sorted_index[0]]
                    best_layout_generations[gen, :] = pop[0, :]
                    best_layout_NA_generations[gen, :] = pop_NA[0, :]
                else:
                    fitness_generations[gen] = fitness_generations[gen - 1]
                    best_layout_generations[gen, :] = best_layout_generations[gen - 1, :]
                    best_layout_NA_generations[gen, :] = best_layout_NA_generations[gen - 1, :]
            # selection -> crossover -> mutation, all operating in place on pop/pop_NA/pop_indices
            n_parents, parent_layouts, parent_layouts_NA, parent_pop_indices = self.conventional_select(
                pop=pop, pop_NA=pop_NA, pop_indices=pop_indices,
                pop_size=self.pop_size,
                elite_rate=self.elite_rate,
                random_rate=self.random_rate)
            self.conventional_crossover(N=self.N, pop=pop, pop_NA=pop_NA, pop_indices=pop_indices,
                                        pop_size=self.pop_size,
                                        n_parents=n_parents,
                                        parent_layouts=parent_layouts, parent_layouts_NA=parent_layouts_NA,
                                        parent_pop_indices=parent_pop_indices)
            self.conventional_mutation(rows=self.rows, cols=self.cols, N=self.N, pop=pop, pop_NA=pop_NA,
                                       pop_indices=pop_indices,
                                       pop_size=self.pop_size,
                                       mutation_rate=self.mutate_rate)
        end_time = datetime.now()
        run_time = (end_time - start_time).total_seconds()
        # normalize fitness to efficiency eta = power / ideal power
        eta_generations = np.copy(fitness_generations)
        eta_generations = eta_generations * (1.0 / P_rate_total)
        time_stamp = datetime.now().strftime("%Y%m%d%H%M%S")
        filename = "{}/conventional_eta_N{}_{}_{}.dat".format(result_folder, self.N, ind_time, time_stamp)
        np.savetxt(filename, eta_generations, fmt='%f', delimiter=" ")
        filename = "{}/conventional_best_layouts_N{}_{}_{}.dat".format(result_folder, self.N, ind_time, time_stamp)
        np.savetxt(filename, best_layout_generations, fmt='%d', delimiter=" ")
        filename = "{}/conventional_best_layouts_NA_N{}_{}_{}.dat".format(result_folder, self.N, ind_time, time_stamp)
        np.savetxt(filename, best_layout_NA_generations, fmt='%d', delimiter=" ")
        print("conventional genetic algorithm ends.")
        # append summary lines so repeated runs accumulate in the same files
        filename = "{}/conventional_runtime.txt".format(result_folder)
        f = open(filename, "a+")
        f.write("{}\n".format(run_time))
        f.close()
        filename = "{}/conventional_eta.txt".format(result_folder)
        f = open(filename, "a+")
        f.write("{}\n".format(eta_generations[self.iteration - 1]))
        f.close()
        return run_time, eta_generations[self.iteration - 1]
    def conventional_mutation(self, rows, cols, N, pop, pop_NA, pop_indices, pop_size, mutation_rate):
        """Mutate layouts in place: for each selected layout, move one randomly
        chosen turbine to a randomly chosen empty (and available) cell, keeping
        pop, pop_NA and the sorted pop_indices consistent."""
        # NOTE(review): np.random.randn() draws from a standard *normal*, not
        # uniform [0,1), so the effective mutation probability is
        # P(randn() <= mutation_rate), not mutation_rate itself — presumably
        # np.random.rand() was intended; left unchanged to preserve behaviour.
        np.random.seed(seed=int(time.time()))  # re-seed from wall clock each call
        for i in range(pop_size):
            if np.random.randn() > mutation_rate:
                continue
            while True:
                # pick an occupied cell (pop_NA==1 means turbine present & usable)
                turbine_pos = np.random.randint(0, cols * rows)
                if pop_NA[i, turbine_pos] == 1:
                    break
            while True:
                # pick an empty usable cell (pop_NA==0; NA cells are marked 2)
                null_turbine_pos = np.random.randint(0, cols * rows)
                if pop_NA[i, null_turbine_pos] == 0:
                    break
            pop[i, turbine_pos] = 0
            pop[i, null_turbine_pos] = 1
            pop_NA[i, turbine_pos] = 0
            pop_NA[i, null_turbine_pos] = 1
            # update the cached index list and restore its ascending order
            for j in range(N):
                if pop_indices[i, j] == turbine_pos:
                    pop_indices[i, j] = null_turbine_pos
                    break
            pop_indices[i, :] = np.sort(pop_indices[i, :])
        return
    def conventional_crossover(self, N, pop, pop_NA, pop_indices, pop_size, n_parents,
                               parent_layouts, parent_layouts_NA, parent_pop_indices):
        """Rebuild the whole population in place by single-point crossover of
        randomly paired parents: child takes the male's first cross_point
        turbines and the female's remaining ones (only when the two index
        ranges do not overlap, so the child keeps exactly N turbines)."""
        n_counter = 0
        np.random.seed(seed=int(time.time()))  # init random seed
        while n_counter < pop_size:
            male = np.random.randint(0, n_parents)
            female = np.random.randint(0, n_parents)
            if male != female:
                cross_point = np.random.randint(1, N)
                # accept only if the male's prefix ends strictly before the
                # female's suffix begins — guarantees no duplicated cells
                if parent_pop_indices[male, cross_point - 1] < parent_pop_indices[female, cross_point]:
                    pop[n_counter, :] = 0
                    pop[n_counter, :parent_pop_indices[male, cross_point - 1] + 1] = \
                        parent_layouts[male, :parent_pop_indices[male, cross_point - 1] + 1]
                    pop[n_counter, parent_pop_indices[female, cross_point]:] = \
                        parent_layouts[female, parent_pop_indices[female, cross_point]:]
                    # rebuild the NA view: copy layout, then re-mark NA cells as 2
                    pop_NA[n_counter, :] = pop[n_counter, :]
                    for i in self.NA_loc:
                        pop_NA[n_counter, i - 1] = 2  # NA_loc indices start from 1
                    pop_indices[n_counter, :cross_point] = parent_pop_indices[male, :cross_point]
                    pop_indices[n_counter, cross_point:] = parent_pop_indices[female, cross_point:]
                    n_counter += 1
        return
    def conventional_select(self, pop, pop_NA, pop_indices, pop_size, elite_rate, random_rate):
        """Select parents from a population already sorted by fitness: always
        keep the top elite_rate fraction, plus a random subset of the rest.
        Returns (number of parents, layouts, NA layouts, index rows)."""
        n_elite = int(pop_size * elite_rate)
        parents_ind = [i for i in range(n_elite)]  # elites are rows 0..n_elite-1
        np.random.seed(seed=int(time.time()))  # init random seed
        for i in range(n_elite, pop_size):
            # NOTE(review): randn() is a standard normal, not uniform [0,1);
            # the acceptance probability is therefore P(randn() < random_rate),
            # not random_rate — presumably rand() was intended; left unchanged.
            if np.random.randn() < random_rate:
                parents_ind.append(i)
        parent_layouts = pop[parents_ind, :]
        parent_layouts_NA = pop_NA[parents_ind, :]
        parent_pop_indices = pop_indices[parents_ind, :]
        return len(parent_pop_indices), parent_layouts, parent_layouts_NA, parent_pop_indices
def conventional_fitness(self, pop, rows, cols, pop_size, N, po):
    """Evaluate the expected power output of every layout in the population.

    For each layout: extract turbine x/y coordinates, then for every wind
    direction (self.theta) and speed (self.velocity) rotate the coordinates
    into the wind frame, compute wake-induced speed deficits and accumulate
    each turbine's probability-weighted power (weights self.f_theta_v).

    po[i, :] receives the layout's turbine cell indices ordered from least
    to most productive.  Returns a float32 array of per-layout total power.

    Changes vs. original: the unused cr_position array was removed, and the
    rotation + wake computation is hoisted out of the velocity loop (it only
    depends on the direction).  Assumes self.wake_calculate is a pure
    function of the rotated positions — TODO confirm.
    """
    fitness_val = np.zeros(pop_size, dtype=np.float32)
    for i in range(pop_size):
        xy_position = np.zeros((2, N), dtype=np.float32)  # x, y coordinates of the N turbines
        ind_position = np.zeros(N, dtype=np.int32)  # flat cell index of each turbine
        ind_pos = 0
        for ind in range(rows * cols):
            if pop[i, ind] == 1:
                r_i = np.floor(ind / cols)
                c_i = np.floor(ind - r_i * cols)
                xy_position[0, ind_pos] = c_i * self.cell_width + self.cell_width_half
                xy_position[1, ind_pos] = r_i * self.cell_width + self.cell_width_half
                ind_position[ind_pos] = ind
                ind_pos += 1
        lp_power_accum = np.zeros(N, dtype=np.float32)  # accumulated expected power per turbine
        for ind_t in range(len(self.theta)):
            # PERF: rotation matrix and wake deficits depend only on the
            # direction, so compute them once per theta instead of once
            # per (theta, velocity) pair.
            trans_matrix = np.array(
                [[np.cos(self.theta[ind_t]), -np.sin(self.theta[ind_t])],
                 [np.sin(self.theta[ind_t]), np.cos(self.theta[ind_t])]],
                np.float32)
            trans_xy_position = np.matmul(trans_matrix, xy_position)
            speed_deficiency = self.wake_calculate(trans_xy_position, N)
            for ind_v in range(len(self.velocity)):
                actual_velocity = (1 - speed_deficiency) * self.velocity[ind_v]
                lp_power = self.layout_power(actual_velocity, N)  # per-turbine power at this speed/direction
                lp_power_accum += lp_power * self.f_theta_v[ind_t, ind_v]
        sorted_index = np.argsort(lp_power_accum)  # power from least to largest
        po[i, :] = ind_position[sorted_index]
        fitness_val[i] = np.sum(lp_power_accum)
    return fitness_val
# AGA: adaptive genetic algorithm
def adaptive_genetic_alg(self, ind_time=0,result_folder=None):  # adaptive genetic algorithm
    """Run the adaptive GA for self.iteration generations and persist results.

    Each generation: evaluate fitness, sort the population best-first,
    track the best layout so far, then apply move-worst, selection,
    crossover and mutation in place.  Efficiency (eta) is fitness scaled
    by 1 / total rated power.  Per-run .dat files and cumulative .txt
    logs are written under result_folder.

    Returns (run_time_seconds, final_eta).
    """
    P_rate_total = self.cal_P_rate_total()
    start_time = datetime.now()
    print("adaptive genetic algorithm starts....")
    fitness_generations = np.zeros(self.iteration, dtype=np.float32)  # best fitness value in each generation
    best_layout_generations = np.zeros((self.iteration, self.rows * self.cols),
                                       dtype=np.int32)  # best layout in each generation
    best_layout_NA_generations = np.zeros((self.iteration, self.rows * self.cols),
                                          dtype=np.int32)  # best layout (NA-marked copy) in each generation
    power_order = np.zeros((self.pop_size, self.N),
                           dtype=np.int32)  # each row: layout's turbine cell indices, least to most power
    pop = np.copy(self.init_pop)
    pop_NA = np.copy(self.init_pop_NA)
    pop_indices = np.copy(self.init_pop_nonezero_indices)  # each row is a layout's turbine cell indices
    # NOTE(review): eN/rN/mN/cN are computed but never used below.
    eN = int(np.floor(self.pop_size * self.elite_rate))  # elite number
    rN = int(int(np.floor(self.pop_size * self.mutate_rate)) / eN) * eN  # reproduce number
    mN = rN  # mutation number
    cN = self.pop_size - eN - mN  # crossover number
    for gen in range(self.iteration):
        print("generation {}...".format(gen))
        fitness_value = self.adaptive_fitness(pop=pop, rows=self.rows, cols=self.cols, pop_size=self.pop_size,
                                              N=self.N,
                                              po=power_order)
        sorted_index = np.argsort(-fitness_value)  # fitness value descending from largest to least
        pop = pop[sorted_index, :]
        pop_NA = pop_NA[sorted_index, :]
        power_order = power_order[sorted_index, :]
        pop_indices = pop_indices[sorted_index, :]
        # best-so-far bookkeeping: keep the previous best when this
        # generation's champion is not an improvement
        if gen == 0:
            fitness_generations[gen] = fitness_value[sorted_index[0]]
            best_layout_generations[gen, :] = pop[0, :]
            best_layout_NA_generations[gen, :] = pop_NA[0, :]
        else:
            if fitness_value[sorted_index[0]] > fitness_generations[gen - 1]:
                fitness_generations[gen] = fitness_value[sorted_index[0]]
                best_layout_generations[gen, :] = pop[0, :]
                best_layout_NA_generations[gen, :] = pop_NA[0, :]
            else:
                fitness_generations[gen] = fitness_generations[gen - 1]
                best_layout_generations[gen, :] = best_layout_generations[gen - 1, :]
                best_layout_NA_generations[gen, :] = best_layout_NA_generations[gen - 1, :]
        # genetic operators, all mutating pop/pop_NA/pop_indices in place
        self.adaptive_move_worst(rows=self.rows, cols=self.cols, pop=pop,pop_NA=pop_NA, pop_indices=pop_indices,
                                 pop_size=self.pop_size, power_order=power_order)
        n_parents, parent_layouts,parent_layouts_NA, parent_pop_indices = self.adaptive_select(pop=pop,pop_NA=pop_NA, pop_indices=pop_indices,
                                                                                               pop_size=self.pop_size,
                                                                                               elite_rate=self.elite_rate,
                                                                                               random_rate=self.random_rate)
        self.adaptive_crossover(N=self.N, pop=pop,pop_NA=pop_NA, pop_indices=pop_indices, pop_size=self.pop_size,
                                n_parents=n_parents,
                                parent_layouts=parent_layouts,parent_layouts_NA=parent_layouts_NA, parent_pop_indices=parent_pop_indices)
        self.adaptive_mutation(rows=self.rows, cols=self.cols, N=self.N, pop=pop,pop_NA=pop_NA, pop_indices=pop_indices,
                               pop_size=self.pop_size,
                               mutation_rate=self.mutate_rate)
    end_time = datetime.now()
    run_time = (end_time - start_time).total_seconds()
    # eta = fitness normalized by the farm's total rated power
    eta_generations = np.copy(fitness_generations)
    eta_generations = eta_generations * (1.0 / P_rate_total)
    time_stamp = datetime.now().strftime("%Y%m%d%H%M%S")
    filename = "{}/adaptive_eta_N{}_{}_{}.dat".format(result_folder,self.N, ind_time, time_stamp)
    np.savetxt(filename, eta_generations, fmt='%f', delimiter=" ")
    filename = "{}/adaptive_best_layouts_N{}_{}_{}.dat".format(result_folder,self.N, ind_time, time_stamp)
    np.savetxt(filename, best_layout_generations, fmt='%d', delimiter=" ")
    filename = "{}/adaptive_best_layouts_NA_N{}_{}_{}.dat".format(result_folder,self.N, ind_time, time_stamp)
    np.savetxt(filename, best_layout_NA_generations, fmt='%d', delimiter=" ")
    print("adaptive genetic algorithm ends.")
    # cumulative logs: appended across runs ("a+")
    filename = "{}/adaptive_runtime.txt".format(result_folder)
    f = open(filename, "a+")
    f.write("{}\n".format(run_time))
    f.close()
    filename = "{}/adaptive_eta.txt".format(result_folder)
    f = open(filename, "a+")
    f.write("{}\n".format(eta_generations[self.iteration - 1]))
    f.close()
    return run_time, eta_generations[self.iteration - 1]
def adaptive_move_worst(self, rows, cols, pop,pop_NA, pop_indices, pop_size, power_order):
    """Move each layout's least-productive turbine (power_order[i, 0]) to a
    uniformly random empty, usable cell.  All arrays are updated in place
    and pop_indices rows are kept sorted."""
    np.random.seed(seed=int(time.time()))
    total_cells = rows * cols
    for row in range(pop_size):
        worst_cell = power_order[row, 0]
        # keep drawing until we land on an empty, usable cell (pop_NA == 0)
        new_cell = np.random.randint(0, total_cells)
        while pop_NA[row, new_cell] != 0:
            new_cell = np.random.randint(0, total_cells)
        pop[row, worst_cell], pop[row, new_cell] = 0, 1
        pop_NA[row, worst_cell], pop_NA[row, new_cell] = 0, 1
        power_order[row, 0] = new_cell
        pop_indices[row, :] = np.sort(power_order[row, :])
    return
def adaptive_mutation(self, rows, cols, N, pop,pop_NA, pop_indices, pop_size, mutation_rate):
    """Mutate each layout with probability mutation_rate by moving one
    randomly chosen turbine to a randomly chosen empty, usable cell.

    pop, pop_NA and pop_indices are modified in place; pop_indices rows
    stay sorted.
    """
    np.random.seed(seed=int(time.time()))
    for i in range(pop_size):
        # BUG FIX: the original drew np.random.randn() (standard normal),
        # so the effective mutation probability was not mutation_rate.
        # Use a uniform [0, 1) draw instead.
        if np.random.rand() > mutation_rate:
            continue
        # pick an occupied cell (pop_NA == 1 means a turbine stands there)
        while True:
            turbine_pos = np.random.randint(0, cols * rows)
            if pop_NA[i, turbine_pos] == 1:
                break
        # pick an empty, usable cell (pop_NA == 0; 2 marks unusable cells)
        while True:
            null_turbine_pos = np.random.randint(0, cols * rows)
            if pop_NA[i, null_turbine_pos] == 0:
                break
        pop[i, turbine_pos] = 0
        pop[i, null_turbine_pos] = 1
        pop_NA[i, turbine_pos] = 0
        pop_NA[i, null_turbine_pos] = 1
        for j in range(N):
            if pop_indices[i, j] == turbine_pos:
                pop_indices[i, j] = null_turbine_pos
                break
        pop_indices[i, :] = np.sort(pop_indices[i, :])
    return
def adaptive_crossover(self, N, pop,pop_NA, pop_indices, pop_size, n_parents,
                       parent_layouts,parent_layouts_NA, parent_pop_indices):
    """Refill the population with single-point crossover children, in place.

    Same operator as conventional_crossover: draw two distinct parents and
    a cut point; when the male's last turbine before the cut lies strictly
    left of the female's turbine at the cut (so the child keeps exactly N
    turbines), write the child into pop, pop_NA and pop_indices.  Loops
    until pop_size children have been produced.

    NOTE(review): if n_parents < 2 the `male != female` test can never pass
    and this loop will not terminate — confirm callers guarantee >= 2.
    """
    n_counter = 0
    np.random.seed(seed=int(time.time()))  # init random seed
    while n_counter < pop_size:
        male = np.random.randint(0, n_parents)
        female = np.random.randint(0, n_parents)
        if male != female:
            cross_point = np.random.randint(1, N)
            # child is valid only if the male segment ends strictly before
            # the female segment begins (keeps the turbine count at N)
            if parent_pop_indices[male, cross_point - 1] < parent_pop_indices[female, cross_point]:
                pop[n_counter, :] = 0
                # male contributes cells up to and including his
                # (cross_point-1)-th turbine
                pop[n_counter, :parent_pop_indices[male, cross_point - 1] + 1] = parent_layouts[male,
                                                                                 :parent_pop_indices[
                                                                                     male, cross_point - 1] + 1]
                # female contributes cells from her cross_point-th turbine on
                pop[n_counter, parent_pop_indices[female, cross_point]:] = parent_layouts[female,
                                                                           parent_pop_indices[female, cross_point]:]
                # rebuild the NA-aware copy: same layout, unusable cells marked 2
                pop_NA[n_counter, :] = pop[n_counter, :]
                for i in self.NA_loc:
                    pop_NA[n_counter, i - 1] = 2  # NA_loc holds 1-based cell indices
                pop_indices[n_counter, :cross_point] = parent_pop_indices[male, :cross_point]
                pop_indices[n_counter, cross_point:] = parent_pop_indices[female, cross_point:]
                n_counter += 1
    return
def adaptive_select(self, pop,pop_NA, pop_indices, pop_size, elite_rate, random_rate):
    """Select parent layouts: all elites plus a random sample of the rest.

    Assumes pop is already sorted by fitness (best first), so the first
    ``int(pop_size * elite_rate)`` rows are the elites and always survive;
    each remaining row is kept with probability random_rate.

    Returns (n_parents, parent_layouts, parent_layouts_NA, parent_pop_indices).
    """
    n_elite = int(pop_size * elite_rate)
    parents_ind = [i for i in range(n_elite)]  # elites always survive
    np.random.seed(seed=int(time.time()))  # NOTE: per-second seed; calls within the same second share a stream
    for i in range(n_elite, pop_size):
        # BUG FIX: the original compared np.random.randn() (a standard-normal
        # sample) against random_rate, which does not give a random_rate
        # selection probability.  Use a uniform [0, 1) draw.
        if np.random.rand() < random_rate:
            parents_ind.append(i)
    parent_layouts = pop[parents_ind, :]
    parent_layouts_NA = pop_NA[parents_ind, :]
    parent_pop_indices = pop_indices[parents_ind, :]
    return len(parent_pop_indices), parent_layouts, parent_layouts_NA, parent_pop_indices
def adaptive_fitness(self, pop, rows, cols, pop_size, N, po):
    """Evaluate the expected power output of every layout in the population.

    For each layout: extract turbine x/y coordinates, then for every wind
    direction (self.theta) and speed (self.velocity) rotate the coordinates
    into the wind frame, compute wake-induced speed deficits and accumulate
    each turbine's probability-weighted power (weights self.f_theta_v).

    po[i, :] receives the layout's turbine cell indices ordered from least
    to most productive.  Returns a float32 array of per-layout total power.

    Changes vs. original: the unused cr_position array was removed, and the
    rotation + wake computation is hoisted out of the velocity loop (it only
    depends on the direction).  Assumes self.wake_calculate is a pure
    function of the rotated positions — TODO confirm.
    """
    fitness_val = np.zeros(pop_size, dtype=np.float32)
    for i in range(pop_size):
        xy_position = np.zeros((2, N), dtype=np.float32)  # x, y coordinates of the N turbines
        ind_position = np.zeros(N, dtype=np.int32)  # flat cell index of each turbine
        ind_pos = 0
        for ind in range(rows * cols):
            if pop[i, ind] == 1:
                r_i = np.floor(ind / cols)
                c_i = np.floor(ind - r_i * cols)
                xy_position[0, ind_pos] = c_i * self.cell_width + self.cell_width_half
                xy_position[1, ind_pos] = r_i * self.cell_width + self.cell_width_half
                ind_position[ind_pos] = ind
                ind_pos += 1
        lp_power_accum = np.zeros(N, dtype=np.float32)  # accumulated expected power per turbine
        for ind_t in range(len(self.theta)):
            # PERF: rotation matrix and wake deficits depend only on the
            # direction, so compute them once per theta instead of once
            # per (theta, velocity) pair.
            trans_matrix = np.array(
                [[np.cos(self.theta[ind_t]), -np.sin(self.theta[ind_t])],
                 [np.sin(self.theta[ind_t]), np.cos(self.theta[ind_t])]],
                np.float32)
            trans_xy_position = np.matmul(trans_matrix, xy_position)
            speed_deficiency = self.wake_calculate(trans_xy_position, N)
            for ind_v in range(len(self.velocity)):
                actual_velocity = (1 - speed_deficiency) * self.velocity[ind_v]
                lp_power = self.layout_power(actual_velocity, N)  # per-turbine power at this speed/direction
                lp_power_accum += lp_power * self.f_theta_v[ind_t, ind_v]
        sorted_index = np.argsort(lp_power_accum)  # power from least to largest
        po[i, :] = ind_position[sorted_index]
        fitness_val[i] = np.sum(lp_power_accum)
    return fitness_val
# SUGGA: support vector regression guided genetic algorithm
def sugga_genetic_alg(self, ind_time=0,svr_model=None,result_folder=None):
    """Run the SVR-guided GA for self.iteration generations and persist results.

    Same loop as the adaptive GA, except the move-worst step consults the
    svr_model surrogate to pick relocation cells.  Efficiency (eta) is
    fitness scaled by 1 / total rated power.  Per-run .dat files and
    cumulative .txt logs are written under result_folder.

    Returns (run_time_seconds, final_eta).
    """
    P_rate_total = self.cal_P_rate_total()
    start_time = datetime.now()
    print("Support vector regression guided genetic algorithm starts....")
    fitness_generations = np.zeros(self.iteration, dtype=np.float32)  # best fitness value in each generation
    best_layout_generations = np.zeros((self.iteration, self.rows * self.cols),
                                       dtype=np.int32)  # best layout in each generation
    best_layout_NA_generations = np.zeros((self.iteration, self.rows * self.cols),
                                          dtype=np.int32)  # best layout (NA-marked copy) in each generation
    power_order = np.zeros((self.pop_size, self.N),
                           dtype=np.int32)  # each row: layout's turbine cell indices, least to most power
    pop = np.copy(self.init_pop)
    pop_NA = np.copy(self.init_pop_NA)
    pop_indices = np.copy(self.init_pop_nonezero_indices)  # each row is a layout's turbine cell indices
    # NOTE(review): eN/rN/mN/cN are computed but never used below.
    eN = int(np.floor(self.pop_size * self.elite_rate))  # elite number
    rN = int(int(np.floor(self.pop_size * self.mutate_rate)) / eN) * eN  # reproduce number
    mN = rN  # mutation number
    cN = self.pop_size - eN - mN  # crossover number
    for gen in range(self.iteration):
        print("generation {}...".format(gen))
        fitness_value = self.sugga_fitness(pop=pop, rows=self.rows, cols=self.cols, pop_size=self.pop_size,
                                           N=self.N,
                                           po=power_order)
        sorted_index = np.argsort(-fitness_value)  # fitness value descending from largest to least
        pop = pop[sorted_index, :]
        pop_NA = pop_NA[sorted_index, :]
        power_order = power_order[sorted_index, :]
        pop_indices = pop_indices[sorted_index, :]
        # best-so-far bookkeeping: keep the previous best when this
        # generation's champion is not an improvement
        if gen == 0:
            fitness_generations[gen] = fitness_value[sorted_index[0]]
            best_layout_generations[gen, :] = pop[0, :]
            best_layout_NA_generations[gen, :] = pop_NA[0, :]
        else:
            if fitness_value[sorted_index[0]] > fitness_generations[gen - 1]:
                fitness_generations[gen] = fitness_value[sorted_index[0]]
                best_layout_generations[gen, :] = pop[0, :]
                best_layout_NA_generations[gen, :] = pop_NA[0, :]
            else:
                fitness_generations[gen] = fitness_generations[gen - 1]
                best_layout_generations[gen, :] = best_layout_generations[gen - 1, :]
                best_layout_NA_generations[gen, :] = best_layout_NA_generations[gen - 1, :]
        # genetic operators, all mutating pop/pop_NA/pop_indices in place
        self.sugga_move_worst(rows=self.rows, cols=self.cols, pop=pop,pop_NA=pop_NA, pop_indices=pop_indices,
                              pop_size=self.pop_size, power_order=power_order, svr_model=svr_model)
        n_parents, parent_layouts,parent_layouts_NA, parent_pop_indices = self.sugga_select(pop=pop,pop_NA=pop_NA, pop_indices=pop_indices,
                                                                                            pop_size=self.pop_size,
                                                                                            elite_rate=self.elite_rate,
                                                                                            random_rate=self.random_rate)
        self.sugga_crossover(N=self.N, pop=pop,pop_NA=pop_NA, pop_indices=pop_indices, pop_size=self.pop_size,
                             n_parents=n_parents,
                             parent_layouts=parent_layouts,parent_layouts_NA=parent_layouts_NA, parent_pop_indices=parent_pop_indices)
        self.sugga_mutation(rows=self.rows, cols=self.cols, N=self.N, pop=pop,pop_NA=pop_NA, pop_indices=pop_indices,
                            pop_size=self.pop_size,
                            mutation_rate=self.mutate_rate)
    end_time = datetime.now()
    run_time = (end_time - start_time).total_seconds()
    # eta = fitness normalized by the farm's total rated power
    eta_generations = np.copy(fitness_generations)
    eta_generations = eta_generations * (1.0 / P_rate_total)
    time_stamp = datetime.now().strftime("%Y%m%d%H%M%S")
    filename = "{}/sugga_eta_N{}_{}_{}.dat".format(result_folder,self.N, ind_time, time_stamp)
    np.savetxt(filename, eta_generations, fmt='%f', delimiter=" ")
    filename = "{}/sugga_best_layouts_N{}_{}_{}.dat".format(result_folder,self.N, ind_time, time_stamp)
    np.savetxt(filename, best_layout_generations, fmt='%d', delimiter=" ")
    filename = "{}/sugga_best_layouts_NA_N{}_{}_{}.dat".format(result_folder,self.N, ind_time, time_stamp)
    np.savetxt(filename, best_layout_NA_generations, fmt='%d', delimiter=" ")
    print("Support vector regression guided genetic algorithm ends.")
    # cumulative logs: appended across runs ("a+")
    filename = "{}/sugga_runtime.txt".format(result_folder)
    f = open(filename, "a+")
    f.write("{}\n".format(run_time))
    f.close()
    filename = "{}/sugga_eta.txt".format(result_folder)
    f = open(filename, "a+")
    f.write("{}\n".format(eta_generations[self.iteration - 1]))
    f.close()
    return run_time, eta_generations[self.iteration - 1]
def sugga_move_worst(self, rows, cols, pop,pop_NA, pop_indices, pop_size, power_order, mars=None,svr_model=None):
    """Relocate the least-productive turbine of every layout.

    For each layout, with probability 0.5 the turbine is moved to a random
    empty cell, otherwise to the best of several SVR-scored candidate
    cells.  All arrays are modified in place by the helpers.
    """
    np.random.seed(seed=int(time.time()))
    for i in range(pop_size):
        # BUG FIX: the original drew np.random.randn() (standard normal),
        # so "r < 0.5" fired about 69% of the time instead of the intended
        # 50/50 split.  Use a uniform [0, 1) draw.
        r = np.random.rand()
        if r < 0.5:
            self.sugga_move_worst_case_random(i=i, rows=rows, cols=cols, pop=pop,pop_NA=pop_NA,
                                              pop_indices=pop_indices,
                                              pop_size=pop_size, power_order=power_order)
        else:
            self.sugga_move_worst_case_best(i=i, rows=rows, cols=cols, pop=pop,pop_NA=pop_NA,
                                            pop_indices=pop_indices,
                                            pop_size=pop_size, power_order=power_order, mars=mars,svr_model=svr_model)
    return
def sugga_move_worst_case_random(self, i, rows, cols, pop,pop_NA, pop_indices, pop_size, power_order):
    """Move layout i's least-productive turbine (power_order[i, 0]) to a
    uniformly random empty, usable cell; arrays are updated in place."""
    np.random.seed(seed=int(time.time()))
    worst_cell = power_order[i, 0]
    # keep drawing until we land on an empty, usable cell (pop_NA == 0)
    new_cell = np.random.randint(0, cols * rows)
    while pop_NA[i, new_cell] != 0:
        new_cell = np.random.randint(0, cols * rows)
    pop[i, worst_cell], pop[i, new_cell] = 0, 1
    pop_NA[i, worst_cell], pop_NA[i, new_cell] = 0, 1
    power_order[i, 0] = new_cell
    pop_indices[i, :] = np.sort(power_order[i, :])
    return
def sugga_move_worst_case_best(self, i, rows, cols, pop,pop_NA, pop_indices, pop_size, power_order, mars,svr_model):
    """Move layout i's worst turbine to the best of 5 random empty cells,
    ranked by the SVR surrogate's prediction (mars is unused here).
    Candidate cells may repeat; arrays are updated in place."""
    np.random.seed(seed=int(time.time()))
    n_candidates = 5
    candidate_cr = np.zeros((n_candidates, 2), dtype=np.int32)  # (col, row) per candidate
    candidate_cells = np.zeros(n_candidates, dtype=np.int32)  # flat cell index per candidate
    worst_cell = power_order[i, 0]
    filled = 0
    while filled < n_candidates:
        cell = np.random.randint(0, cols * rows)
        if pop_NA[i, cell] != 0:
            continue  # occupied or unusable cell — draw again
        row_idx = int(np.floor(cell / cols))
        candidate_cr[filled, 1] = row_idx
        candidate_cr[filled, 0] = int(np.floor(cell - row_idx * cols))
        candidate_cells[filled] = cell
        filled += 1
    scores = svr_model.predict(candidate_cr)
    best = np.argsort(-scores)[0]  # highest predicted value first
    new_cell = candidate_cells[best]
    pop[i, worst_cell], pop[i, new_cell] = 0, 1
    pop_NA[i, worst_cell], pop_NA[i, new_cell] = 0, 1
    power_order[i, 0] = new_cell
    pop_indices[i, :] = np.sort(power_order[i, :])
    return
def sugga_move_worst_case_worst(self, i, rows, cols, pop, pop_indices, pop_size, power_order, mars):
    """Move layout i's worst turbine to the one of 11 random empty cells
    with the LOWEST surrogate prediction (np.argsort is ascending, so
    index 0 is the minimum).  pop_NA is not touched by this variant."""
    np.random.seed(seed=int(time.time()))
    n_candidates = 11
    candidate_cr = np.zeros((n_candidates, 2), dtype=np.int32)  # (col, row) per candidate
    candidate_cells = np.zeros(n_candidates, dtype=np.int32)
    worst_cell = power_order[i, 0]
    filled = 0
    while filled < n_candidates:
        cell = np.random.randint(0, cols * rows)
        if pop[i, cell] != 0:
            continue  # occupied cell — draw again
        row_idx = int(np.floor(cell / cols))
        candidate_cr[filled, 1] = row_idx
        candidate_cr[filled, 0] = int(np.floor(cell - row_idx * cols))
        candidate_cells[filled] = cell
        filled += 1
    predictions = mars.predict(candidate_cr)[:, 0]
    new_cell = candidate_cells[np.argsort(predictions)[0]]
    pop[i, worst_cell] = 0
    pop[i, new_cell] = 1
    power_order[i, 0] = new_cell
    pop_indices[i, :] = np.sort(power_order[i, :])
    return
# SUGGA move worst
def sugga_move_worst_case_middle(self, i, rows, cols, pop, pop_indices, pop_size, power_order, mars):
    """Move layout i's worst turbine to the median-ranked of 11 random
    empty cells, ranked by descending surrogate prediction (index 5 of
    the sorted order).  pop_NA is not touched by this variant."""
    np.random.seed(seed=int(time.time()))
    n_candidates = 11
    candidate_cr = np.zeros((n_candidates, 2), dtype=np.int32)  # (col, row) per candidate
    candidate_cells = np.zeros(n_candidates, dtype=np.int32)
    worst_cell = power_order[i, 0]
    filled = 0
    while filled < n_candidates:
        cell = np.random.randint(0, cols * rows)
        if pop[i, cell] != 0:
            continue  # occupied cell — draw again
        row_idx = int(np.floor(cell / cols))
        candidate_cr[filled, 1] = row_idx
        candidate_cr[filled, 0] = int(np.floor(cell - row_idx * cols))
        candidate_cells[filled] = cell
        filled += 1
    predictions = mars.predict(candidate_cr)[:, 0]
    ranking = np.argsort(-predictions)  # descending: best candidate first
    new_cell = candidate_cells[ranking[5]]  # pick the middle of the 11
    pop[i, worst_cell] = 0
    pop[i, new_cell] = 1
    power_order[i, 0] = new_cell
    pop_indices[i, :] = np.sort(power_order[i, :])
    return
# SUGGA mutation
def sugga_mutation(self, rows, cols, N, pop,pop_NA, pop_indices, pop_size, mutation_rate):
    """Mutate each layout with probability mutation_rate by moving one
    randomly chosen turbine to a randomly chosen empty, usable cell.

    pop, pop_NA and pop_indices are modified in place; pop_indices rows
    stay sorted.
    """
    np.random.seed(seed=int(time.time()))
    for i in range(pop_size):
        # BUG FIX: the original drew np.random.randn() (standard normal),
        # so the effective mutation probability was not mutation_rate.
        # Use a uniform [0, 1) draw instead.
        if np.random.rand() > mutation_rate:
            continue
        # pick an occupied cell (pop_NA == 1 means a turbine stands there)
        while True:
            turbine_pos = np.random.randint(0, cols * rows)
            if pop_NA[i, turbine_pos] == 1:
                break
        # pick an empty, usable cell (pop_NA == 0; 2 marks unusable cells)
        while True:
            null_turbine_pos = np.random.randint(0, cols * rows)
            if pop_NA[i, null_turbine_pos] == 0:
                break
        pop[i, turbine_pos] = 0
        pop[i, null_turbine_pos] = 1
        pop_NA[i, turbine_pos] = 0
        pop_NA[i, null_turbine_pos] = 1
        for j in range(N):
            if pop_indices[i, j] == turbine_pos:
                pop_indices[i, j] = null_turbine_pos
                break
        pop_indices[i, :] = np.sort(pop_indices[i, :])
    return
# SUGGA crossover
def sugga_crossover(self, N, pop,pop_NA, pop_indices, pop_size, n_parents,
                    parent_layouts,parent_layouts_NA, parent_pop_indices):
    """Refill the population with single-point crossover children, in place.

    Same operator as conventional_crossover: draw two distinct parents and
    a cut point; when the male's last turbine before the cut lies strictly
    left of the female's turbine at the cut (so the child keeps exactly N
    turbines), write the child into pop, pop_NA and pop_indices.  Loops
    until pop_size children have been produced.

    NOTE(review): if n_parents < 2 the `male != female` test can never pass
    and this loop will not terminate — confirm callers guarantee >= 2.
    """
    n_counter = 0
    np.random.seed(seed=int(time.time()))  # init random seed
    while n_counter < pop_size:
        male = np.random.randint(0, n_parents)
        female = np.random.randint(0, n_parents)
        if male != female:
            cross_point = np.random.randint(1, N)
            # child is valid only if the male segment ends strictly before
            # the female segment begins (keeps the turbine count at N)
            if parent_pop_indices[male, cross_point - 1] < parent_pop_indices[female, cross_point]:
                pop[n_counter, :] = 0
                # male contributes cells up to and including his
                # (cross_point-1)-th turbine
                pop[n_counter, :parent_pop_indices[male, cross_point - 1] + 1] = parent_layouts[male,
                                                                                 :parent_pop_indices[
                                                                                     male, cross_point - 1] + 1]
                # female contributes cells from her cross_point-th turbine on
                pop[n_counter, parent_pop_indices[female, cross_point]:] = parent_layouts[female,
                                                                           parent_pop_indices[female, cross_point]:]
                # rebuild the NA-aware copy: same layout, unusable cells marked 2
                pop_NA[n_counter, :] = pop[n_counter, :]
                for i in self.NA_loc:
                    pop_NA[n_counter, i - 1] = 2  # NA_loc holds 1-based cell indices
                pop_indices[n_counter, :cross_point] = parent_pop_indices[male, :cross_point]
                pop_indices[n_counter, cross_point:] = parent_pop_indices[female, cross_point:]
                n_counter += 1
    return
# SUGGA select
def sugga_select(self, pop,pop_NA, pop_indices, pop_size, elite_rate, random_rate):
    """Select parent layouts: all elites plus a random sample of the rest.

    Assumes pop is already sorted by fitness (best first), so the first
    ``int(pop_size * elite_rate)`` rows are the elites and always survive;
    each remaining row is kept with probability random_rate.

    Returns (n_parents, parent_layouts, parent_layouts_NA, parent_pop_indices).
    """
    n_elite = int(pop_size * elite_rate)
    parents_ind = [i for i in range(n_elite)]  # elites always survive
    np.random.seed(seed=int(time.time()))  # NOTE: per-second seed; calls within the same second share a stream
    for i in range(n_elite, pop_size):
        # BUG FIX: the original compared np.random.randn() (a standard-normal
        # sample) against random_rate, which does not give a random_rate
        # selection probability.  Use a uniform [0, 1) draw.
        if np.random.rand() < random_rate:
            parents_ind.append(i)
    parent_layouts = pop[parents_ind, :]
    parent_layouts_NA = pop_NA[parents_ind, :]
    parent_pop_indices = pop_indices[parents_ind, :]
    return len(parent_pop_indices), parent_layouts, parent_layouts_NA, parent_pop_indices
# calculate fitness value of each layout in the population
def sugga_fitness(self, pop, rows, cols, pop_size, N, po):
    """Evaluate the expected power output of every layout in the population.

    For each layout: extract turbine x/y coordinates, then for every wind
    direction (self.theta) and speed (self.velocity) rotate the coordinates
    into the wind frame, compute wake-induced speed deficits and accumulate
    each turbine's probability-weighted power (weights self.f_theta_v).

    po[i, :] receives the layout's turbine cell indices ordered from least
    to most productive.  Returns a float32 array of per-layout total power.

    Changes vs. original: the unused cr_position array was removed, and the
    rotation + wake computation is hoisted out of the velocity loop (it only
    depends on the direction).  Assumes self.wake_calculate is a pure
    function of the rotated positions — TODO confirm.
    """
    fitness_val = np.zeros(pop_size, dtype=np.float32)
    for i in range(pop_size):
        xy_position = np.zeros((2, N), dtype=np.float32)  # x, y coordinates of the N turbines
        ind_position = np.zeros(N, dtype=np.int32)  # flat cell index of each turbine
        ind_pos = 0
        for ind in range(rows * cols):
            if pop[i, ind] == 1:
                r_i = np.floor(ind / cols)
                c_i = np.floor(ind - r_i * cols)
                xy_position[0, ind_pos] = c_i * self.cell_width + self.cell_width_half
                xy_position[1, ind_pos] = r_i * self.cell_width + self.cell_width_half
                ind_position[ind_pos] = ind
                ind_pos += 1
        lp_power_accum = np.zeros(N, dtype=np.float32)  # accumulated expected power per turbine
        for ind_t in range(len(self.theta)):
            # PERF: rotation matrix and wake deficits depend only on the
            # direction, so compute them once per theta instead of once
            # per (theta, velocity) pair.
            trans_matrix = np.array(
                [[np.cos(self.theta[ind_t]), -np.sin(self.theta[ind_t])],
                 [np.sin(self.theta[ind_t]), np.cos(self.theta[ind_t])]],
                np.float32)
            trans_xy_position = np.matmul(trans_matrix, xy_position)
            speed_deficiency = self.wake_calculate(trans_xy_position, N)
            for ind_v in range(len(self.velocity)):
                actual_velocity = (1 - speed_deficiency) * self.velocity[ind_v]
                lp_power = self.layout_power(actual_velocity, N)  # per-turbine power at this speed/direction
                lp_power_accum += lp_power * self.f_theta_v[ind_t, ind_v]
        sorted_index = np.argsort(lp_power_accum)  # power from least to largest
        po[i, :] = ind_position[sorted_index]
        fitness_val[i] = np.sum(lp_power_accum)
    return fitness_val
class GE_1_5_sleTurbine:
    """GE 1.5 SLE turbine model: physical constants plus its power curve.

    Attribute names ("rator") are kept as-is for API compatibility.
    """

    hub_height = 80.0  # unit (m)
    rator_diameter = 77.0  # unit m
    surface_roughness = 0.25 * 0.001  # unit mm surface roughness
    # surface_roughness = 0.25  # unit mm surface roughness
    rator_radius = 0
    entrainment_const = 0

    def __init__(self):
        """Derive the rotor radius and the wake entrainment constant from
        hub height and surface roughness."""
        self.rator_radius = self.rator_diameter / 2
        self.entrainment_const = 0.5 / np.log(self.hub_height / self.surface_roughness)
        return

    # power curve
    def P_i_X(self, v):
        """Piecewise power output at wind speed v: zero below cut-in (2)
        and at/above cut-out (18), cubic up to 12.8, rated 629.1 between."""
        if 2.0 <= v < 12.8:
            return 0.3 * v ** 3
        if 12.8 <= v < 18:
            return 629.1
        return 0
class LayoutGridMCGenerator:
    """Monte-Carlo generators of random wind-farm layouts on a rows x cols grid.

    Layout arrays are (n, rows*cols) int32 matrices: 1 = turbine, 0 = empty;
    in the *_NA variants a 2 marks a not-usable cell.

    FIX: the generator functions take no self/cls parameter but were declared
    as plain methods, so calling them on an instance passed the instance as
    `rows` and broke.  They are now @staticmethod and work when called on
    either the class or an instance.
    """

    def __init__(self):
        return

    # rows : number of rows in wind farm
    # cols : number of columns in wind farm
    # n    : number of layouts
    # N    : number of turbines
    @staticmethod
    def gen_mc_grid(rows, cols, n, N, lofname):
        """Generate n random layouts with N turbines each, save them to
        lofname (space-separated ints, one layout per line) and return the
        layout matrix.

        Cells are drawn from a pre-generated pool of N*n*2 random positions;
        if the pool runs out before every layout has N turbines, a warning
        is printed and the remaining layouts stay incomplete.
        """
        np.random.seed(seed=int(time.time()))  # init random seed
        layouts = np.zeros((n, rows * cols), dtype=np.int32)  # one row is a layout
        position_x = np.random.randint(0, cols, size=(N * n * 2))
        position_y = np.random.randint(0, rows, size=(N * n * 2))
        ind_rows = 0  # index of the layout currently being filled
        ind_pos = 0  # next entry of the random-position pool
        while ind_rows < n:
            layouts[ind_rows, position_x[ind_pos] + position_y[ind_pos] * cols] = 1
            if np.sum(layouts[ind_rows, :]) == N:
                ind_rows += 1
            ind_pos += 1
            if ind_pos >= N * n * 2:
                print("Not enough positions")
                break
        np.savetxt(lofname, layouts, fmt='%d', delimiter=" ")
        return layouts

    # NA_loc : not-usable locations (1-based cell indices)
    @staticmethod
    def gen_mc_grid_with_NA_loc(rows, cols, n, N, NA_loc, lofname, loNAfname):
        """Like gen_mc_grid, but skips the not-usable cells listed in NA_loc.

        Saves and returns (layouts, layouts_NA): layouts is the plain 0/1
        matrix; layouts_NA additionally marks not-usable cells with 2.
        (The unused N_count counter of the original was removed.)
        """
        np.random.seed(seed=int(time.time()))  # init random seed
        layouts = np.zeros((n, rows * cols), dtype=np.int32)  # NA cells stay 0 here
        layouts_NA = np.zeros((n, rows * cols), dtype=np.int32)  # NA cells marked 2
        for loc in NA_loc:
            layouts_NA[:, loc - 1] = 2  # NA_loc is 1-based
        position_x = np.random.randint(0, cols, size=(N * n * 2))
        position_y = np.random.randint(0, rows, size=(N * n * 2))
        ind_rows = 0
        ind_pos = 0
        while ind_rows < n:
            cell = position_x[ind_pos] + position_y[ind_pos] * cols
            cur_state = layouts_NA[ind_rows, cell]
            if cur_state != 1 and cur_state != 2:  # cell is empty and usable
                layouts[ind_rows, cell] = 1
                layouts_NA[ind_rows, cell] = 1
                if np.sum(layouts[ind_rows, :]) == N:
                    ind_rows += 1
            ind_pos += 1
            if ind_pos >= N * n * 2:
                print("Not enough positions")
                break
        np.savetxt(lofname, layouts, fmt='%d', delimiter=" ")
        np.savetxt(loNAfname, layouts_NA, fmt='%d', delimiter=" ")
        return layouts, layouts_NA

    @staticmethod
    def gen_pop(rows, cols, n, N):
        """Generate a population of n random layouts with N turbines each.
        Same as gen_mc_grid but without saving to a file."""
        np.random.seed(seed=int(time.time()))
        layouts = np.zeros((n, rows * cols), dtype=np.int32)
        position_x = np.random.randint(0, cols, size=(N * n * 2))
        position_y = np.random.randint(0, rows, size=(N * n * 2))
        ind_rows = 0
        ind_pos = 0
        while ind_rows < n:
            layouts[ind_rows, position_x[ind_pos] + position_y[ind_pos] * cols] = 1
            if np.sum(layouts[ind_rows, :]) == N:
                ind_rows += 1
            ind_pos += 1
            if ind_pos >= N * n * 2:
                print("Not enough positions")
                break
        return layouts

    @staticmethod
    def gen_pop_with_NA_loc(rows, cols, n, N, NA_loc):
        """Generate a population of n random layouts with N turbines each,
        avoiding the 1-based not-usable cells in NA_loc.  Returns
        (layouts, layouts_NA) without saving to files."""
        np.random.seed(seed=int(time.time()))  # init random seed
        layouts = np.zeros((n, rows * cols), dtype=np.int32)  # NA cells stay 0 here
        layouts_NA = np.zeros((n, rows * cols), dtype=np.int32)  # NA cells marked 2
        for loc in NA_loc:
            layouts_NA[:, loc - 1] = 2  # NA_loc is 1-based
        position_x = np.random.randint(0, cols, size=(N * n * 2))
        position_y = np.random.randint(0, rows, size=(N * n * 2))
        ind_rows = 0
        ind_pos = 0
        while ind_rows < n:
            cell = position_x[ind_pos] + position_y[ind_pos] * cols
            cur_state = layouts_NA[ind_rows, cell]
            if cur_state != 1 and cur_state != 2:  # cell is empty and usable
                layouts[ind_rows, cell] = 1
                layouts_NA[ind_rows, cell] = 1
                if np.sum(layouts[ind_rows, :]) == N:
                    ind_rows += 1
            ind_pos += 1
            if ind_pos >= N * n * 2:
                print("Not enough positions")
                break
        return layouts, layouts_NA
| 51.269127
| 161
| 0.55244
| 7,402
| 56,960
| 4.004323
| 0.043096
| 0.040823
| 0.023617
| 0.014676
| 0.860459
| 0.838023
| 0.823954
| 0.810999
| 0.807153
| 0.799359
| 0
| 0.019038
| 0.344329
| 56,960
| 1,110
| 162
| 51.315315
| 0.7746
| 0.099052
| 0
| 0.773481
| 0
| 0
| 0.020045
| 0.007994
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047514
| false
| 0
| 0.00663
| 0.00221
| 0.132597
| 0.01547
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
58fc61a8acf089105edf2fb03443fb259136dc45
| 293
|
py
|
Python
|
running_modes/configurations/reinforcement_learning/__init__.py
|
fujirock/Reinvent
|
9c57636f9d32b4ce5b75670f43906a70d5daf886
|
[
"MIT"
] | 4
|
2021-05-11T05:34:01.000Z
|
2022-03-30T10:04:21.000Z
|
running_modes/configurations/reinforcement_learning/__init__.py
|
prasannavd/Reinvent
|
ca02ebee8d8ed83223c55f4a1dd1b3fbc2359616
|
[
"MIT"
] | null | null | null |
running_modes/configurations/reinforcement_learning/__init__.py
|
prasannavd/Reinvent
|
ca02ebee8d8ed83223c55f4a1dd1b3fbc2359616
|
[
"MIT"
] | 2
|
2021-06-01T11:56:10.000Z
|
2021-10-05T04:33:56.000Z
|
from running_modes.configurations.reinforcement_learning.inception_configuration import *
from running_modes.configurations.reinforcement_learning.reinforcement_learning_components import *
from running_modes.configurations.reinforcement_learning.reinforcement_learning_configuration import *
| 73.25
| 102
| 0.918089
| 29
| 293
| 8.896552
| 0.344828
| 0.406977
| 0.186047
| 0.348837
| 0.802326
| 0.802326
| 0.604651
| 0.604651
| 0.604651
| 0
| 0
| 0
| 0.040956
| 293
| 3
| 103
| 97.666667
| 0.918149
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
18d27315cd409a25dd021e7f5764e06eba76b7cc
| 110
|
py
|
Python
|
pixelflow/transforms/functional/__init__.py
|
didriknielsen/pixelcnn_flow
|
9030f6a66d5ff83d7d299541ed55b20b20bb9a15
|
[
"MIT"
] | 25
|
2020-02-12T00:35:48.000Z
|
2021-09-18T14:30:43.000Z
|
pixelflow/transforms/functional/__init__.py
|
didriknielsen/pixelcnn_flow
|
9030f6a66d5ff83d7d299541ed55b20b20bb9a15
|
[
"MIT"
] | 1
|
2021-08-05T10:00:04.000Z
|
2021-08-10T11:11:16.000Z
|
pixelflow/transforms/functional/__init__.py
|
didriknielsen/pixelcnn_flow
|
9030f6a66d5ff83d7d299541ed55b20b20bb9a15
|
[
"MIT"
] | null | null | null |
from .splines import *
from .cmol import *
from .cmol_multivariate import *
from .get_mixture_params import *
| 22
| 33
| 0.781818
| 15
| 110
| 5.533333
| 0.533333
| 0.361446
| 0.337349
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145455
| 110
| 4
| 34
| 27.5
| 0.882979
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
18e1e04e2aa587102a9dc5a54b943b01d1d97c3f
| 131
|
py
|
Python
|
software/python/autostep/autostep/__init__.py
|
hanhanhan-kim/autostep
|
4072bcd47be7c25b48b0503198f031e98a4102be
|
[
"MIT"
] | 2
|
2020-01-07T02:59:54.000Z
|
2021-06-23T06:07:42.000Z
|
software/python/autostep/autostep/__init__.py
|
hanhanhan-kim/autostep
|
4072bcd47be7c25b48b0503198f031e98a4102be
|
[
"MIT"
] | 5
|
2020-07-16T03:28:34.000Z
|
2021-05-07T01:11:18.000Z
|
software/python/autostep/autostep/__init__.py
|
hanhanhan-kim/autostep
|
4072bcd47be7c25b48b0503198f031e98a4102be
|
[
"MIT"
] | 3
|
2019-05-26T02:49:29.000Z
|
2020-12-29T05:50:07.000Z
|
from .autostep import Autostep
from .autostep import AutostepException
from .asynchronous_trajectory import AsynchronousTrajectory
| 32.75
| 59
| 0.885496
| 13
| 131
| 8.846154
| 0.538462
| 0.208696
| 0.313043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091603
| 131
| 3
| 60
| 43.666667
| 0.966387
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e147d22f9dfa0b16a83fb133ab0d72a670aeecc2
| 235,179
|
py
|
Python
|
bindings/python/ensmallen/datasets/kgobo.py
|
LucaCappelletti94/EnsmallenGraph
|
572532b6d3f4352bf58f9ccca955376acd95fd89
|
[
"MIT"
] | null | null | null |
bindings/python/ensmallen/datasets/kgobo.py
|
LucaCappelletti94/EnsmallenGraph
|
572532b6d3f4352bf58f9ccca955376acd95fd89
|
[
"MIT"
] | null | null | null |
bindings/python/ensmallen/datasets/kgobo.py
|
LucaCappelletti94/EnsmallenGraph
|
572532b6d3f4352bf58f9ccca955376acd95fd89
|
[
"MIT"
] | null | null | null |
"""Module providing graphs available from KGOBO.
References
----------
Please cite:
```bib
@misc{kgobo,
title = "KG-OBO",
year = "2021",
author = "{Reese, Justin and Caufield, Harry}",
howpublished = {\\url{https://github.com/Knowledge-Graph-Hub/kg-obo}},
note = {Online; accessed 14 September 2021}
}
```
"""
from ensmallen import Graph # pylint: disable=import-error
from .automatic_graph_retrieval import AutomaticallyRetrievedGraph
def MOD(
    directed=False, preprocess="auto", load_nodes=True,
    load_node_types=True, load_edge_weights=True,
    auto_enable_tradeoffs=True, sort_tmp_dir=None, verbose=2,
    cache=True, cache_path=None, cache_sys_var="GRAPH_CACHE_DIR",
    version="10-03-2021-14-36", **kwargs
) -> Graph:
    """Retrieve the MOD graph from the kgobo repository.

    Preprocessing ("auto") targets optimal load time & memory peak
    and runs on Linux/macOS but not Windows.  ``load_nodes`` loads
    node names instead of a numeric range; ``auto_enable_tradeoffs``
    applies when the graph has < 50M edges.  ``cache_path`` defaults
    to the `GRAPH_CACHE_DIR` sys var or `graphs`.

    The available versions are:
        - 1.031.4
        - 10-03-2021-14-36
    """
    # Build the retrieval object first, then invoke it to download
    # (or load from cache) and construct the graph.
    retrieval = AutomaticallyRetrievedGraph(
        "MOD", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retrieval()
def FBBT(
    directed=False, preprocess="auto", load_nodes=True,
    load_node_types=True, load_edge_weights=True,
    auto_enable_tradeoffs=True, sort_tmp_dir=None, verbose=2,
    cache=True, cache_path=None, cache_sys_var="GRAPH_CACHE_DIR",
    version="2022-02-24", **kwargs
) -> Graph:
    """Retrieve the FBBT graph from the kgobo repository.

    Preprocessing ("auto") targets optimal load time & memory peak
    and runs on Linux/macOS but not Windows.  ``load_nodes`` loads
    node names instead of a numeric range; ``auto_enable_tradeoffs``
    applies when the graph has < 50M edges.  ``cache_path`` defaults
    to the `GRAPH_CACHE_DIR` sys var or `graphs`.

    The available versions are:
        - 2022-04-13
        - 2021-09-01
        - 2021-10-14
        - 2021-12-09
        - 2022-01-27
        - 2022-02-24
    """
    # Build the retrieval object first, then invoke it to download
    # (or load from cache) and construct the graph.
    retrieval = AutomaticallyRetrievedGraph(
        "FBBT", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retrieval()
def BTO(
    directed=False, preprocess="auto", load_nodes=True,
    load_node_types=True, load_edge_weights=True,
    auto_enable_tradeoffs=True, sort_tmp_dir=None, verbose=2,
    cache=True, cache_path=None, cache_sys_var="GRAPH_CACHE_DIR",
    version="2021-04-27", **kwargs
) -> Graph:
    """Retrieve the BTO graph from the kgobo repository.

    Preprocessing ("auto") targets optimal load time & memory peak
    and runs on Linux/macOS but not Windows.  ``load_nodes`` loads
    node names instead of a numeric range; ``auto_enable_tradeoffs``
    applies when the graph has < 50M edges.  ``cache_path`` defaults
    to the `GRAPH_CACHE_DIR` sys var or `graphs`.

    The available versions are:
        - 2021-10-26
        - 2021-04-27
    """
    # Build the retrieval object first, then invoke it to download
    # (or load from cache) and construct the graph.
    retrieval = AutomaticallyRetrievedGraph(
        "BTO", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retrieval()
def CHMO(
    directed=False, preprocess="auto", load_nodes=True,
    load_node_types=True, load_edge_weights=True,
    auto_enable_tradeoffs=True, sort_tmp_dir=None, verbose=2,
    cache=True, cache_path=None, cache_sys_var="GRAPH_CACHE_DIR",
    version="2022-02-17", **kwargs
) -> Graph:
    """Retrieve the CHMO graph from the kgobo repository.

    Preprocessing ("auto") targets optimal load time & memory peak
    and runs on Linux/macOS but not Windows.  ``load_nodes`` loads
    node names instead of a numeric range; ``auto_enable_tradeoffs``
    applies when the graph has < 50M edges.  ``cache_path`` defaults
    to the `GRAPH_CACHE_DIR` sys var or `graphs`.

    The available versions are:
        - 2022-04-19
        - no_version
        - 2022-02-17
    """
    # Build the retrieval object first, then invoke it to download
    # (or load from cache) and construct the graph.
    retrieval = AutomaticallyRetrievedGraph(
        "CHMO", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retrieval()
def OBA(
    directed=False, preprocess="auto", load_nodes=True,
    load_node_types=True, load_edge_weights=True,
    auto_enable_tradeoffs=True, sort_tmp_dir=None, verbose=2,
    cache=True, cache_path=None, cache_sys_var="GRAPH_CACHE_DIR",
    version="2022-01-19", **kwargs
) -> Graph:
    """Retrieve the OBA graph from the kgobo repository.

    Preprocessing ("auto") targets optimal load time & memory peak
    and runs on Linux/macOS but not Windows.  ``load_nodes`` loads
    node names instead of a numeric range; ``auto_enable_tradeoffs``
    applies when the graph has < 50M edges.  ``cache_path`` defaults
    to the `GRAPH_CACHE_DIR` sys var or `graphs`.

    The available versions are:
        - 2022-05-11
        - 13-11-2015-10-21
        - 2021-12-03
        - 2022-01-19
    """
    # Build the retrieval object first, then invoke it to download
    # (or load from cache) and construct the graph.
    retrieval = AutomaticallyRetrievedGraph(
        "OBA", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retrieval()
def PSO(
    directed=False, preprocess="auto", load_nodes=True,
    load_node_types=True, load_edge_weights=True,
    auto_enable_tradeoffs=True, sort_tmp_dir=None, verbose=2,
    cache=True, cache_path=None, cache_sys_var="GRAPH_CACHE_DIR",
    version="2020-05-19", **kwargs
) -> Graph:
    """Retrieve the PSO graph from the kgobo repository.

    Preprocessing ("auto") targets optimal load time & memory peak
    and runs on Linux/macOS but not Windows.  ``load_nodes`` loads
    node names instead of a numeric range; ``auto_enable_tradeoffs``
    applies when the graph has < 50M edges.  ``cache_path`` defaults
    to the `GRAPH_CACHE_DIR` sys var or `graphs`.

    The available versions are:
        - 2020-05-19
    """
    # Build the retrieval object first, then invoke it to download
    # (or load from cache) and construct the graph.
    retrieval = AutomaticallyRetrievedGraph(
        "PSO", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retrieval()
def OGSF(
    directed=False, preprocess="auto", load_nodes=True,
    load_node_types=True, load_edge_weights=True,
    auto_enable_tradeoffs=True, sort_tmp_dir=None, verbose=2,
    cache=True, cache_path=None, cache_sys_var="GRAPH_CACHE_DIR",
    version="11-22-2014", **kwargs
) -> Graph:
    """Retrieve the OGSF graph from the kgobo repository.

    Preprocessing ("auto") targets optimal load time & memory peak
    and runs on Linux/macOS but not Windows.  ``load_nodes`` loads
    node names instead of a numeric range; ``auto_enable_tradeoffs``
    applies when the graph has < 50M edges.  ``cache_path`` defaults
    to the `GRAPH_CACHE_DIR` sys var or `graphs`.

    The available versions are:
        - 11-22-2014
    """
    # Build the retrieval object first, then invoke it to download
    # (or load from cache) and construct the graph.
    retrieval = AutomaticallyRetrievedGraph(
        "OGSF", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retrieval()
def MCO(
    directed=False, preprocess="auto", load_nodes=True,
    load_node_types=True, load_edge_weights=True,
    auto_enable_tradeoffs=True, sort_tmp_dir=None, verbose=2,
    cache=True, cache_path=None, cache_sys_var="GRAPH_CACHE_DIR",
    version="2019-05-15", **kwargs
) -> Graph:
    """Retrieve the MCO graph from the kgobo repository.

    Preprocessing ("auto") targets optimal load time & memory peak
    and runs on Linux/macOS but not Windows.  ``load_nodes`` loads
    node names instead of a numeric range; ``auto_enable_tradeoffs``
    applies when the graph has < 50M edges.  ``cache_path`` defaults
    to the `GRAPH_CACHE_DIR` sys var or `graphs`.

    The available versions are:
        - 2019-05-15
    """
    # Build the retrieval object first, then invoke it to download
    # (or load from cache) and construct the graph.
    retrieval = AutomaticallyRetrievedGraph(
        "MCO", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retrieval()
def OPMI(
    directed=False, preprocess="auto", load_nodes=True,
    load_node_types=True, load_edge_weights=True,
    auto_enable_tradeoffs=True, sort_tmp_dir=None, verbose=2,
    cache=True, cache_path=None, cache_sys_var="GRAPH_CACHE_DIR",
    version="Vision-Release--1.0.130", **kwargs
) -> Graph:
    """Retrieve the OPMI graph from the kgobo repository.

    Preprocessing ("auto") targets optimal load time & memory peak
    and runs on Linux/macOS but not Windows.  ``load_nodes`` loads
    node names instead of a numeric range; ``auto_enable_tradeoffs``
    applies when the graph has < 50M edges.  ``cache_path`` defaults
    to the `GRAPH_CACHE_DIR` sys var or `graphs`.

    The available versions are:
        - Vision-Release--1.0.130
    """
    # Build the retrieval object first, then invoke it to download
    # (or load from cache) and construct the graph.
    retrieval = AutomaticallyRetrievedGraph(
        "OPMI", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retrieval()
def FBDV(
    directed=False, preprocess="auto", load_nodes=True,
    load_node_types=True, load_edge_weights=True,
    auto_enable_tradeoffs=True, sort_tmp_dir=None, verbose=2,
    cache=True, cache_path=None, cache_sys_var="GRAPH_CACHE_DIR",
    version="2022-02-25", **kwargs
) -> Graph:
    """Retrieve the FBDV graph from the kgobo repository.

    Preprocessing ("auto") targets optimal load time & memory peak
    and runs on Linux/macOS but not Windows.  ``load_nodes`` loads
    node names instead of a numeric range; ``auto_enable_tradeoffs``
    applies when the graph has < 50M edges.  ``cache_path`` defaults
    to the `GRAPH_CACHE_DIR` sys var or `graphs`.

    The available versions are:
        - 2022-04-12
        - 2021-09-01
        - 2021-10-13
        - 2021-12-06
        - 2022-01-24
        - 2022-02-25
    """
    # Build the retrieval object first, then invoke it to download
    # (or load from cache) and construct the graph.
    retrieval = AutomaticallyRetrievedGraph(
        "FBDV", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retrieval()
def CEPH(
    directed=False, preprocess="auto", load_nodes=True,
    load_node_types=True, load_edge_weights=True,
    auto_enable_tradeoffs=True, sort_tmp_dir=None, verbose=2,
    cache=True, cache_path=None, cache_sys_var="GRAPH_CACHE_DIR",
    version="2016-01-12", **kwargs
) -> Graph:
    """Retrieve the CEPH graph from the kgobo repository.

    Preprocessing ("auto") targets optimal load time & memory peak
    and runs on Linux/macOS but not Windows.  ``load_nodes`` loads
    node names instead of a numeric range; ``auto_enable_tradeoffs``
    applies when the graph has < 50M edges.  ``cache_path`` defaults
    to the `GRAPH_CACHE_DIR` sys var or `graphs`.

    The available versions are:
        - 2016-01-12
    """
    # Build the retrieval object first, then invoke it to download
    # (or load from cache) and construct the graph.
    retrieval = AutomaticallyRetrievedGraph(
        "CEPH", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retrieval()
def MPATH(
    directed=False, preprocess="auto", load_nodes=True,
    load_node_types=True, load_edge_weights=True,
    auto_enable_tradeoffs=True, sort_tmp_dir=None, verbose=2,
    cache=True, cache_path=None, cache_sys_var="GRAPH_CACHE_DIR",
    version="2020-05-19", **kwargs
) -> Graph:
    """Retrieve the MPATH graph from the kgobo repository.

    Preprocessing ("auto") targets optimal load time & memory peak
    and runs on Linux/macOS but not Windows.  ``load_nodes`` loads
    node names instead of a numeric range; ``auto_enable_tradeoffs``
    applies when the graph has < 50M edges.  ``cache_path`` defaults
    to the `GRAPH_CACHE_DIR` sys var or `graphs`.

    The available versions are:
        - 2020-05-19
    """
    # Build the retrieval object first, then invoke it to download
    # (or load from cache) and construct the graph.
    retrieval = AutomaticallyRetrievedGraph(
        "MPATH", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retrieval()
def SPD(
    directed=False, preprocess="auto", load_nodes=True,
    load_node_types=True, load_edge_weights=True,
    auto_enable_tradeoffs=True, sort_tmp_dir=None, verbose=2,
    cache=True, cache_path=None, cache_sys_var="GRAPH_CACHE_DIR",
    version="1.0", **kwargs
) -> Graph:
    """Retrieve the SPD graph from the kgobo repository.

    Preprocessing ("auto") targets optimal load time & memory peak
    and runs on Linux/macOS but not Windows.  ``load_nodes`` loads
    node names instead of a numeric range; ``auto_enable_tradeoffs``
    applies when the graph has < 50M edges.  ``cache_path`` defaults
    to the `GRAPH_CACHE_DIR` sys var or `graphs`.

    The available versions are:
        - 1.0
    """
    # Build the retrieval object first, then invoke it to download
    # (or load from cache) and construct the graph.
    retrieval = AutomaticallyRetrievedGraph(
        "SPD", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retrieval()
def OMIT(
    directed=False, preprocess="auto", load_nodes=True,
    load_node_types=True, load_edge_weights=True,
    auto_enable_tradeoffs=True, sort_tmp_dir=None, verbose=2,
    cache=True, cache_path=None, cache_sys_var="GRAPH_CACHE_DIR",
    version="dev", **kwargs
) -> Graph:
    """Retrieve the OMIT graph from the kgobo repository.

    Preprocessing ("auto") targets optimal load time & memory peak
    and runs on Linux/macOS but not Windows.  ``load_nodes`` loads
    node names instead of a numeric range; ``auto_enable_tradeoffs``
    applies when the graph has < 50M edges.  ``cache_path`` defaults
    to the `GRAPH_CACHE_DIR` sys var or `graphs`.

    The available versions are:
        - dev
    """
    # Build the retrieval object first, then invoke it to download
    # (or load from cache) and construct the graph.
    retrieval = AutomaticallyRetrievedGraph(
        "OMIT", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retrieval()
def VT(
    directed=False, preprocess="auto", load_nodes=True,
    load_node_types=True, load_edge_weights=True,
    auto_enable_tradeoffs=True, sort_tmp_dir=None, verbose=2,
    cache=True, cache_path=None, cache_sys_var="GRAPH_CACHE_DIR",
    version="04-10-2021-10-15", **kwargs
) -> Graph:
    """Retrieve the VT graph from the kgobo repository.

    Preprocessing ("auto") targets optimal load time & memory peak
    and runs on Linux/macOS but not Windows.  ``load_nodes`` loads
    node names instead of a numeric range; ``auto_enable_tradeoffs``
    applies when the graph has < 50M edges.  ``cache_path`` defaults
    to the `GRAPH_CACHE_DIR` sys var or `graphs`.

    The available versions are:
        - 04-10-2021-10-15
    """
    # Build the retrieval object first, then invoke it to download
    # (or load from cache) and construct the graph.
    retrieval = AutomaticallyRetrievedGraph(
        "VT", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retrieval()
def EHDAA2(
    directed=False, preprocess="auto", load_nodes=True,
    load_node_types=True, load_edge_weights=True,
    auto_enable_tradeoffs=True, sort_tmp_dir=None, verbose=2,
    cache=True, cache_path=None, cache_sys_var="GRAPH_CACHE_DIR",
    version="2013-07-04", **kwargs
) -> Graph:
    """Retrieve the EHDAA2 graph from the kgobo repository.

    Preprocessing ("auto") targets optimal load time & memory peak
    and runs on Linux/macOS but not Windows.  ``load_nodes`` loads
    node names instead of a numeric range; ``auto_enable_tradeoffs``
    applies when the graph has < 50M edges.  ``cache_path`` defaults
    to the `GRAPH_CACHE_DIR` sys var or `graphs`.

    The available versions are:
        - 2013-07-04
    """
    # Build the retrieval object first, then invoke it to download
    # (or load from cache) and construct the graph.
    retrieval = AutomaticallyRetrievedGraph(
        "EHDAA2", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retrieval()
def FLOPO(
    directed=False, preprocess="auto", load_nodes=True,
    load_node_types=True, load_edge_weights=True,
    auto_enable_tradeoffs=True, sort_tmp_dir=None, verbose=2,
    cache=True, cache_path=None, cache_sys_var="GRAPH_CACHE_DIR",
    version="no_version", **kwargs
) -> Graph:
    """Retrieve the FLOPO graph from the kgobo repository.

    Preprocessing ("auto") targets optimal load time & memory peak
    and runs on Linux/macOS but not Windows.  ``load_nodes`` loads
    node names instead of a numeric range; ``auto_enable_tradeoffs``
    applies when the graph has < 50M edges.  ``cache_path`` defaults
    to the `GRAPH_CACHE_DIR` sys var or `graphs`.

    The available versions are:
        - no_version
    """
    # Build the retrieval object first, then invoke it to download
    # (or load from cache) and construct the graph.
    retrieval = AutomaticallyRetrievedGraph(
        "FLOPO", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retrieval()
def WBLS(
    directed=False, preprocess="auto", load_nodes=True,
    load_node_types=True, load_edge_weights=True,
    auto_enable_tradeoffs=True, sort_tmp_dir=None, verbose=2,
    cache=True, cache_path=None, cache_sys_var="GRAPH_CACHE_DIR",
    version="2021-12-08", **kwargs
) -> Graph:
    """Retrieve the WBLS graph from the kgobo repository.

    Preprocessing ("auto") targets optimal load time & memory peak
    and runs on Linux/macOS but not Windows.  ``load_nodes`` loads
    node names instead of a numeric range; ``auto_enable_tradeoffs``
    applies when the graph has < 50M edges.  ``cache_path`` defaults
    to the `GRAPH_CACHE_DIR` sys var or `graphs`.

    The available versions are:
        - 2022-03-22
        - 2021-07-06
        - 2021-12-08
    """
    # Build the retrieval object first, then invoke it to download
    # (or load from cache) and construct the graph.
    retrieval = AutomaticallyRetrievedGraph(
        "WBLS", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retrieval()
def RXNO(
    directed=False, preprocess="auto", load_nodes=True,
    load_node_types=True, load_edge_weights=True,
    auto_enable_tradeoffs=True, sort_tmp_dir=None, verbose=2,
    cache=True, cache_path=None, cache_sys_var="GRAPH_CACHE_DIR",
    version="2021-12-06", **kwargs
) -> Graph:
    """Retrieve the RXNO graph from the kgobo repository.

    Preprocessing ("auto") targets optimal load time & memory peak
    and runs on Linux/macOS but not Windows.  ``load_nodes`` loads
    node names instead of a numeric range; ``auto_enable_tradeoffs``
    applies when the graph has < 50M edges.  ``cache_path`` defaults
    to the `GRAPH_CACHE_DIR` sys var or `graphs`.

    The available versions are:
        - 2021-12-16
        - 2021-01-21
        - 2021-11-15
        - 2021-12-06
    """
    # Build the retrieval object first, then invoke it to download
    # (or load from cache) and construct the graph.
    retrieval = AutomaticallyRetrievedGraph(
        "RXNO", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retrieval()
def OMP(
    directed=False, preprocess="auto", load_nodes=True,
    load_node_types=True, load_edge_weights=True,
    auto_enable_tradeoffs=True, sort_tmp_dir=None, verbose=2,
    cache=True, cache_path=None, cache_sys_var="GRAPH_CACHE_DIR",
    version="2022-05-06", **kwargs
) -> Graph:
    """Retrieve the OMP graph from the kgobo repository.

    Preprocessing ("auto") targets optimal load time & memory peak
    and runs on Linux/macOS but not Windows.  ``load_nodes`` loads
    node names instead of a numeric range; ``auto_enable_tradeoffs``
    applies when the graph has < 50M edges.  ``cache_path`` defaults
    to the `GRAPH_CACHE_DIR` sys var or `graphs`.

    The available versions are:
        - 2022-06-03
        - 2021-10-01
        - 2021-12-03
        - 2022-01-07
        - 2022-02-08
        - 2022-03-04
        - 2022-04-11
        - 2022-05-06
    """
    # Build the retrieval object first, then invoke it to download
    # (or load from cache) and construct the graph.
    retrieval = AutomaticallyRetrievedGraph(
        "OMP", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retrieval()
def ERO(
    directed=False, preprocess="auto", load_nodes=True,
    load_node_types=True, load_edge_weights=True,
    auto_enable_tradeoffs=True, sort_tmp_dir=None, verbose=2,
    cache=True, cache_path=None, cache_sys_var="GRAPH_CACHE_DIR",
    version="no_version", **kwargs
) -> Graph:
    """Retrieve the ERO graph from the kgobo repository.

    Preprocessing ("auto") targets optimal load time & memory peak
    and runs on Linux/macOS but not Windows.  ``load_nodes`` loads
    node names instead of a numeric range; ``auto_enable_tradeoffs``
    applies when the graph has < 50M edges.  ``cache_path`` defaults
    to the `GRAPH_CACHE_DIR` sys var or `graphs`.

    The available versions are:
        - no_version
    """
    # Build the retrieval object first, then invoke it to download
    # (or load from cache) and construct the graph.
    retrieval = AutomaticallyRetrievedGraph(
        "ERO", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retrieval()
def GNO(
    directed=False, preprocess="auto", load_nodes=True,
    load_node_types=True, load_edge_weights=True,
    auto_enable_tradeoffs=True, sort_tmp_dir=None, verbose=2,
    cache=True, cache_path=None, cache_sys_var="GRAPH_CACHE_DIR",
    version="2021-08-13", **kwargs
) -> Graph:
    """Retrieve the GNO graph from the kgobo repository.

    Preprocessing ("auto") targets optimal load time & memory peak
    and runs on Linux/macOS but not Windows.  ``load_nodes`` loads
    node names instead of a numeric range; ``auto_enable_tradeoffs``
    applies when the graph has < 50M edges.  ``cache_path`` defaults
    to the `GRAPH_CACHE_DIR` sys var or `graphs`.

    The available versions are:
        - 2022-02-23
        - 2021-08-13
    """
    # Build the retrieval object first, then invoke it to download
    # (or load from cache) and construct the graph.
    retrieval = AutomaticallyRetrievedGraph(
        "GNO", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retrieval()
def XCO(
    directed=False, preprocess="auto", load_nodes=True,
    load_node_types=True, load_edge_weights=True,
    auto_enable_tradeoffs=True, sort_tmp_dir=None, verbose=2,
    cache=True, cache_path=None, cache_sys_var="GRAPH_CACHE_DIR",
    version="4.46", **kwargs
) -> Graph:
    """Retrieve the XCO graph from the kgobo repository.

    Preprocessing ("auto") targets optimal load time & memory peak
    and runs on Linux/macOS but not Windows.  ``load_nodes`` loads
    node names instead of a numeric range; ``auto_enable_tradeoffs``
    applies when the graph has < 50M edges.  ``cache_path`` defaults
    to the `GRAPH_CACHE_DIR` sys var or `graphs`.

    The available versions are:
        - 4.46
    """
    # Build the retrieval object first, then invoke it to download
    # (or load from cache) and construct the graph.
    retrieval = AutomaticallyRetrievedGraph(
        "XCO", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retrieval()
def AMPHX(
    directed=False, preprocess="auto", load_nodes=True,
    load_node_types=True, load_edge_weights=True,
    auto_enable_tradeoffs=True, sort_tmp_dir=None, verbose=2,
    cache=True, cache_path=None, cache_sys_var="GRAPH_CACHE_DIR",
    version="2020-12-18", **kwargs
) -> Graph:
    """Retrieve the AMPHX graph from the kgobo repository.

    Preprocessing ("auto") targets optimal load time & memory peak
    and runs on Linux/macOS but not Windows.  ``load_nodes`` loads
    node names instead of a numeric range; ``auto_enable_tradeoffs``
    applies when the graph has < 50M edges.  ``cache_path`` defaults
    to the `GRAPH_CACHE_DIR` sys var or `graphs`.

    The available versions are:
        - 2020-12-18
    """
    # Build the retrieval object first, then invoke it to download
    # (or load from cache) and construct the graph.
    retrieval = AutomaticallyRetrievedGraph(
        "AMPHX", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retrieval()
def EPIO(
    directed=False, preprocess="auto", load_nodes=True,
    load_node_types=True, load_edge_weights=True,
    auto_enable_tradeoffs=True, sort_tmp_dir=None, verbose=2,
    cache=True, cache_path=None, cache_sys_var="GRAPH_CACHE_DIR",
    version="2021-05-28", **kwargs
) -> Graph:
    """Retrieve the EPIO graph from the kgobo repository.

    Preprocessing ("auto") targets optimal load time & memory peak
    and runs on Linux/macOS but not Windows.  ``load_nodes`` loads
    node names instead of a numeric range; ``auto_enable_tradeoffs``
    applies when the graph has < 50M edges.  ``cache_path`` defaults
    to the `GRAPH_CACHE_DIR` sys var or `graphs`.

    The available versions are:
        - 2021-05-28
    """
    # Build the retrieval object first, then invoke it to download
    # (or load from cache) and construct the graph.
    retrieval = AutomaticallyRetrievedGraph(
        "EPIO", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retrieval()
def CLYH(
    directed=False, preprocess="auto", load_nodes=True,
    load_node_types=True, load_edge_weights=True,
    auto_enable_tradeoffs=True, sort_tmp_dir=None, verbose=2,
    cache=True, cache_path=None, cache_sys_var="GRAPH_CACHE_DIR",
    version="2020-05-29", **kwargs
) -> Graph:
    """Retrieve the CLYH graph from the kgobo repository.

    Preprocessing ("auto") targets optimal load time & memory peak
    and runs on Linux/macOS but not Windows.  ``load_nodes`` loads
    node names instead of a numeric range; ``auto_enable_tradeoffs``
    applies when the graph has < 50M edges.  ``cache_path`` defaults
    to the `GRAPH_CACHE_DIR` sys var or `graphs`.

    The available versions are:
        - 2020-05-29
    """
    # Build the retrieval object first, then invoke it to download
    # (or load from cache) and construct the graph.
    retrieval = AutomaticallyRetrievedGraph(
        "CLYH", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retrieval()
def OOSTT(
    directed=False, preprocess="auto", load_nodes=True,
    load_node_types=True, load_edge_weights=True,
    auto_enable_tradeoffs=True, sort_tmp_dir=None, verbose=2,
    cache=True, cache_path=None, cache_sys_var="GRAPH_CACHE_DIR",
    version="2021-01-08", **kwargs
) -> Graph:
    """Retrieve the OOSTT graph from the kgobo repository.

    Preprocessing ("auto") targets optimal load time & memory peak
    and runs on Linux/macOS but not Windows.  ``load_nodes`` loads
    node names instead of a numeric range; ``auto_enable_tradeoffs``
    applies when the graph has < 50M edges.  ``cache_path`` defaults
    to the `GRAPH_CACHE_DIR` sys var or `graphs`.

    The available versions are:
        - 2021-01-08
    """
    # Build the retrieval object first, then invoke it to download
    # (or load from cache) and construct the graph.
    retrieval = AutomaticallyRetrievedGraph(
        "OOSTT", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retrieval()
def FYPO(
    directed=False, preprocess="auto", load_nodes=True,
    load_node_types=True, load_edge_weights=True,
    auto_enable_tradeoffs=True, sort_tmp_dir=None, verbose=2,
    cache=True, cache_path=None, cache_sys_var="GRAPH_CACHE_DIR",
    version="2022-05-11", **kwargs
) -> Graph:
    """Retrieve the FYPO graph from the kgobo repository.

    Preprocessing ("auto") targets optimal load time & memory peak
    and runs on Linux/macOS but not Windows.  ``load_nodes`` loads
    node names instead of a numeric range; ``auto_enable_tradeoffs``
    applies when the graph has < 50M edges.  ``cache_path`` defaults
    to the `GRAPH_CACHE_DIR` sys var or `graphs`.

    The available versions are:
        - 2022-05-16
        - 2021-10-05
        - 2021-11-08
        - 2021-11-18
        - 2021-12-07
        - 2022-01-18
        - 2022-01-27
        - 2022-04-22
        - 2022-04-28
        - 2022-05-11
    """
    # Build the retrieval object first, then invoke it to download
    # (or load from cache) and construct the graph.
    retrieval = AutomaticallyRetrievedGraph(
        "FYPO", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retrieval()
def NCRO(
    directed=False, preprocess="auto", load_nodes=True,
    load_node_types=True, load_edge_weights=True,
    auto_enable_tradeoffs=True, sort_tmp_dir=None, verbose=2,
    cache=True, cache_path=None, cache_sys_var="GRAPH_CACHE_DIR",
    version="2015-12-10", **kwargs
) -> Graph:
    """Retrieve the NCRO graph from the kgobo repository.

    Preprocessing ("auto") targets optimal load time & memory peak
    and runs on Linux/macOS but not Windows.  ``load_nodes`` loads
    node names instead of a numeric range; ``auto_enable_tradeoffs``
    applies when the graph has < 50M edges.  ``cache_path`` defaults
    to the `GRAPH_CACHE_DIR` sys var or `graphs`.

    The available versions are:
        - 2015-12-10
    """
    # Build the retrieval object first, then invoke it to download
    # (or load from cache) and construct the graph.
    retrieval = AutomaticallyRetrievedGraph(
        "NCRO", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retrieval()
def IAO(
    directed=False, preprocess="auto", load_nodes=True,
    load_node_types=True, load_edge_weights=True,
    auto_enable_tradeoffs=True, sort_tmp_dir=None, verbose=2,
    cache=True, cache_path=None, cache_sys_var="GRAPH_CACHE_DIR",
    version="2020-12-09", **kwargs
) -> Graph:
    """Retrieve the IAO graph from the kgobo repository.

    Preprocessing ("auto") targets optimal load time & memory peak
    and runs on Linux/macOS but not Windows.  ``load_nodes`` loads
    node names instead of a numeric range; ``auto_enable_tradeoffs``
    applies when the graph has < 50M edges.  ``cache_path`` defaults
    to the `GRAPH_CACHE_DIR` sys var or `graphs`.

    The available versions are:
        - 2020-12-09
    """
    # Build the retrieval object first, then invoke it to download
    # (or load from cache) and construct the graph.
    retrieval = AutomaticallyRetrievedGraph(
        "IAO", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retrieval()
def GEO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "production-version-2016-03-26", **kwargs
) -> Graph:
    """Retrieve the GEO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Whether to load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Where graphs are stored; defaults to the `GRAPH_CACHE_DIR`
        system variable or to `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "production-version-2016-03-26"
        Version to retrieve. The only available version is:
        - production-version-2016-03-26
    """
    retriever = AutomaticallyRetrievedGraph(
        "GEO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def EXO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2.1", **kwargs
) -> Graph:
    """Retrieve the EXO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Whether to load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Where graphs are stored; defaults to the `GRAPH_CACHE_DIR`
        system variable or to `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2.1"
        Version to retrieve. The only available version is:
        - 2.1
    """
    retriever = AutomaticallyRetrievedGraph(
        "EXO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def SWO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "swo.owl", **kwargs
) -> Graph:
    """Retrieve the SWO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Whether to load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Where graphs are stored; defaults to the `GRAPH_CACHE_DIR`
        system variable or to `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "swo.owl"
        Version to retrieve. The only available version is:
        - swo.owl
    """
    retriever = AutomaticallyRetrievedGraph(
        "SWO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def OBCS(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2018-02-22", **kwargs
) -> Graph:
    """Retrieve the OBCS graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Whether to load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Where graphs are stored; defaults to the `GRAPH_CACHE_DIR`
        system variable or to `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2018-02-22"
        Version to retrieve. The only available version is:
        - 2018-02-22
    """
    retriever = AutomaticallyRetrievedGraph(
        "OBCS", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def ENVO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-05-14", **kwargs
) -> Graph:
    """Retrieve the ENVO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Whether to load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Where graphs are stored; defaults to the `GRAPH_CACHE_DIR`
        system variable or to `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-05-14"
        Version to retrieve. The only available version is:
        - 2021-05-14
    """
    retriever = AutomaticallyRetrievedGraph(
        "ENVO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def SYMP(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2022-05-10", **kwargs
) -> Graph:
    """Retrieve the SYMP graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Whether to load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Where graphs are stored; defaults to the `GRAPH_CACHE_DIR`
        system variable or to `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2022-05-10"
        Version to retrieve. The available versions are:
        - 2022-05-26
        - 2020-08-04
        - 2022-04-05
        - 2022-05-10
    """
    retriever = AutomaticallyRetrievedGraph(
        "SYMP", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def TAXRANK(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2016-04-15", **kwargs
) -> Graph:
    """Retrieve the TAXRANK graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Whether to load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Where graphs are stored; defaults to the `GRAPH_CACHE_DIR`
        system variable or to `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2016-04-15"
        Version to retrieve. The only available version is:
        - 2016-04-15
    """
    retriever = AutomaticallyRetrievedGraph(
        "TAXRANK", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def APO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2022-03-23", **kwargs
) -> Graph:
    """Retrieve the APO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Whether to load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Where graphs are stored; defaults to the `GRAPH_CACHE_DIR`
        system variable or to `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2022-03-23"
        Version to retrieve. The available versions are:
        - 2022-04-19
        - 2021-09-07
        - 2021-10-07
        - 2022-01-03
        - 2022-02-08
        - 2022-03-23
    """
    retriever = AutomaticallyRetrievedGraph(
        "APO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def CLO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2019-02-10", **kwargs
) -> Graph:
    """Retrieve the CLO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Whether to load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Where graphs are stored; defaults to the `GRAPH_CACHE_DIR`
        system variable or to `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2019-02-10"
        Version to retrieve. The available versions are:
        - 2022-03-20
        - 2019-02-10
    """
    retriever = AutomaticallyRetrievedGraph(
        "CLO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def CMO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2019-02-19", **kwargs
) -> Graph:
    """Retrieve the CMO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Whether to load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Where graphs are stored; defaults to the `GRAPH_CACHE_DIR`
        system variable or to `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2019-02-19"
        Version to retrieve. The only available version is:
        - 2019-02-19
    """
    retriever = AutomaticallyRetrievedGraph(
        "CMO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def OHMI(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2019-09-17", **kwargs
) -> Graph:
    """Retrieve the OHMI graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Whether to load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Where graphs are stored; defaults to the `GRAPH_CACHE_DIR`
        system variable or to `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2019-09-17"
        Version to retrieve. The only available version is:
        - 2019-09-17
    """
    retriever = AutomaticallyRetrievedGraph(
        "OHMI", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def HSO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2020-11-28", **kwargs
) -> Graph:
    """Retrieve the HSO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Whether to load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Where graphs are stored; defaults to the `GRAPH_CACHE_DIR`
        system variable or to `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2020-11-28"
        Version to retrieve. The available versions are:
        - 2021-12-13
        - 2020-11-28
    """
    retriever = AutomaticallyRetrievedGraph(
        "HSO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def FBBI(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2020-11-06", **kwargs
) -> Graph:
    """Retrieve the FBBI graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Whether to load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Where graphs are stored; defaults to the `GRAPH_CACHE_DIR`
        system variable or to `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2020-11-06"
        Version to retrieve. The only available version is:
        - 2020-11-06
    """
    retriever = AutomaticallyRetrievedGraph(
        "FBBI", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def OBI(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-08-18", **kwargs
) -> Graph:
    """Retrieve the OBI graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Whether to load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Where graphs are stored; defaults to the `GRAPH_CACHE_DIR`
        system variable or to `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-08-18"
        Version to retrieve. The available versions are:
        - 2022-01-03
        - 2021-08-18
    """
    retriever = AutomaticallyRetrievedGraph(
        "OBI", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def CDAO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2019-06-26", **kwargs
) -> Graph:
    """Retrieve the CDAO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Whether to load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Where graphs are stored; defaults to the `GRAPH_CACHE_DIR`
        system variable or to `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2019-06-26"
        Version to retrieve. The only available version is:
        - 2019-06-26
    """
    retriever = AutomaticallyRetrievedGraph(
        "CDAO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def MFMO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2013-11-16", **kwargs
) -> Graph:
    """Retrieve the MFMO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Whether to load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Where graphs are stored; defaults to the `GRAPH_CACHE_DIR`
        system variable or to `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2013-11-16"
        Version to retrieve. The only available version is:
        - 2013-11-16
    """
    retriever = AutomaticallyRetrievedGraph(
        "MFMO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def CRO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2019-12-11", **kwargs
) -> Graph:
    """Retrieve the CRO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Whether to load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Where graphs are stored; defaults to the `GRAPH_CACHE_DIR`
        system variable or to `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2019-12-11"
        Version to retrieve. The only available version is:
        - 2019-12-11
    """
    retriever = AutomaticallyRetrievedGraph(
        "CRO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def CHEMINF(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2.0", **kwargs
) -> Graph:
    """Retrieve the CHEMINF graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Whether to load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Where graphs are stored; defaults to the `GRAPH_CACHE_DIR`
        system variable or to `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2.0"
        Version to retrieve. The only available version is:
        - 2.0
    """
    retriever = AutomaticallyRetrievedGraph(
        "CHEMINF", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def MP(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-11-04", **kwargs
) -> Graph:
    """Retrieve the MP graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Whether to load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Where graphs are stored; defaults to the `GRAPH_CACHE_DIR`
        system variable or to `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-11-04"
        Version to retrieve. The available versions are:
        - releases
        - 2021-09-21
        - 2021-10-15
        - 2021-10-26
        - 2021-11-04
    """
    retriever = AutomaticallyRetrievedGraph(
        "MP", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def DUO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-02-23", **kwargs
) -> Graph:
    """Retrieve the DUO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Whether to load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Where graphs are stored; defaults to the `GRAPH_CACHE_DIR`
        system variable or to `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-02-23"
        Version to retrieve. The only available version is:
        - 2021-02-23
    """
    retriever = AutomaticallyRetrievedGraph(
        "DUO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def LABO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-06-08", **kwargs
) -> Graph:
    """Retrieve the LABO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Whether to load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Where graphs are stored; defaults to the `GRAPH_CACHE_DIR`
        system variable or to `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-06-08"
        Version to retrieve. The only available version is:
        - 2021-06-08
    """
    retriever = AutomaticallyRetrievedGraph(
        "LABO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def OLATDV(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2020-03-10", **kwargs
) -> Graph:
    """Retrieve the OLATDV graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Whether to load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Where graphs are stored; defaults to the `GRAPH_CACHE_DIR`
        system variable or to `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2020-03-10"
        Version to retrieve. The only available version is:
        - 2020-03-10
    """
    retriever = AutomaticallyRetrievedGraph(
        "OLATDV", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def MPIO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2019-01-30", **kwargs
) -> Graph:
    """Retrieve the MPIO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Whether to load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Where graphs are stored; defaults to the `GRAPH_CACHE_DIR`
        system variable or to `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2019-01-30"
        Version to retrieve. The only available version is:
        - 2019-01-30
    """
    retriever = AutomaticallyRetrievedGraph(
        "MPIO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def CHEBI(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "209", **kwargs
) -> Graph:
    """Retrieve the CHEBI graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Whether to load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Where graphs are stored; defaults to the `GRAPH_CACHE_DIR`
        system variable or to `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "209"
        Version to retrieve. The available versions are:
        - 210
        - 203
        - 204
        - 205
        - 206
        - 207
        - 208
        - 209
    """
    retriever = AutomaticallyRetrievedGraph(
        "CHEBI", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def AEO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2014-12-05", **kwargs
) -> Graph:
    """Retrieve the AEO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Whether to load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Where graphs are stored; defaults to the `GRAPH_CACHE_DIR`
        system variable or to `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2014-12-05"
        Version to retrieve. The only available version is:
        - 2014-12-05
    """
    retriever = AutomaticallyRetrievedGraph(
        "AEO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def FOBI(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "fobi", **kwargs
) -> Graph:
    """Retrieve the FOBI graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Whether to load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Where graphs are stored; defaults to the `GRAPH_CACHE_DIR`
        system variable or to `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "fobi"
        Version to retrieve. The only available version is:
        - fobi
    """
    retriever = AutomaticallyRetrievedGraph(
        "FOBI", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def GENO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2020-03-08", **kwargs
) -> Graph:
    """Retrieve the GENO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Whether to load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Where graphs are stored; defaults to the `GRAPH_CACHE_DIR`
        system variable or to `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2020-03-08"
        Version to retrieve. The available versions are:
        - 2022-03-05
        - 2020-03-08
    """
    retriever = AutomaticallyRetrievedGraph(
        "GENO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def SBO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "28-08-2021-03-13", **kwargs
) -> Graph:
    """Retrieve the SBO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Whether to load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Where graphs are stored; defaults to the `GRAPH_CACHE_DIR`
        system variable or to `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "28-08-2021-03-13"
        Version to retrieve. The only available version is:
        - 28-08-2021-03-13
    """
    retriever = AutomaticallyRetrievedGraph(
        "SBO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def TO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2022-03-09", **kwargs
) -> Graph:
    """Retrieve the TO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Whether to load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Where graphs are stored; defaults to the `GRAPH_CACHE_DIR`
        system variable or to `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2022-03-09"
        Version to retrieve. The available versions are:
        - 2022-04-13
        - 2021-04-06
        - 2022-03-09
    """
    retriever = AutomaticallyRetrievedGraph(
        "TO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def UO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "no_version", **kwargs
) -> Graph:
    """Retrieve the UO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Whether to load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Where graphs are stored; defaults to the `GRAPH_CACHE_DIR`
        system variable or to `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "no_version"
        Version to retrieve. The only available version is:
        - no_version
    """
    retriever = AutomaticallyRetrievedGraph(
        "UO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def MOP(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2022-02-01", **kwargs
) -> Graph:
    """Retrieve the MOP graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Whether to load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Where graphs are stored; defaults to the `GRAPH_CACHE_DIR`
        system variable or to `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2022-02-01"
        Version to retrieve. The available versions are:
        - 2022-05-11
        - 2014-09-03
        - 2022-02-01
    """
    retriever = AutomaticallyRetrievedGraph(
        "MOP", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def CHIRO(
    directed=False, preprocess="auto", load_nodes=True, load_node_types=True,
    load_edge_weights=True, auto_enable_tradeoffs=True,
    sort_tmp_dir=None, verbose=2, cache=True, cache_path=None,
    cache_sys_var="GRAPH_CACHE_DIR", version="2015-11-23", **kwargs
) -> Graph:
    """Retrieve and return the CHIRO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        preprocessing happens on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Where downloaded graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2015-11-23"
        Version to retrieve. The available versions are:
        - 2015-11-23
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "CHIRO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def OGMS(
    directed=False, preprocess="auto", load_nodes=True, load_node_types=True,
    load_edge_weights=True, auto_enable_tradeoffs=True,
    sort_tmp_dir=None, verbose=2, cache=True, cache_path=None,
    cache_sys_var="GRAPH_CACHE_DIR", version="2021-08-19", **kwargs
) -> Graph:
    """Retrieve and return the OGMS graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        preprocessing happens on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Where downloaded graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-08-19"
        Version to retrieve. The available versions are:
        - 2021-08-19
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "OGMS", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def NCBITAXON(
    directed=False, preprocess="auto", load_nodes=True, load_node_types=True,
    load_edge_weights=True, auto_enable_tradeoffs=True,
    sort_tmp_dir=None, verbose=2, cache=True, cache_path=None,
    cache_sys_var="GRAPH_CACHE_DIR", version="2021-12-14", **kwargs
) -> Graph:
    """Retrieve and return the NCBITAXON graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        preprocessing happens on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Where downloaded graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-12-14"
        Version to retrieve. The available versions are:
        - 2022-02-21
        - 2021-06-10
        - 2021-12-14
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "NCBITAXON", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def FOODON(
    directed=False, preprocess="auto", load_nodes=True, load_node_types=True,
    load_edge_weights=True, auto_enable_tradeoffs=True,
    sort_tmp_dir=None, verbose=2, cache=True, cache_path=None,
    cache_sys_var="GRAPH_CACHE_DIR", version="2021-11-25", **kwargs
) -> Graph:
    """Retrieve and return the FOODON graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        preprocessing happens on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Where downloaded graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-11-25"
        Version to retrieve. The available versions are:
        - 2022-02-01
        - 2021-09-15
        - 2021-11-25
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "FOODON", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def PW(
    directed=False, preprocess="auto", load_nodes=True, load_node_types=True,
    load_edge_weights=True, auto_enable_tradeoffs=True,
    sort_tmp_dir=None, verbose=2, cache=True, cache_path=None,
    cache_sys_var="GRAPH_CACHE_DIR", version="7.52", **kwargs
) -> Graph:
    """Retrieve and return the PW graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        preprocessing happens on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Where downloaded graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "7.52"
        Version to retrieve. The available versions are:
        - 7.52
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "PW", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def FOVT(
    directed=False, preprocess="auto", load_nodes=True, load_node_types=True,
    load_edge_weights=True, auto_enable_tradeoffs=True,
    sort_tmp_dir=None, verbose=2, cache=True, cache_path=None,
    cache_sys_var="GRAPH_CACHE_DIR", version="2021-10-29", **kwargs
) -> Graph:
    """Retrieve and return the FOVT graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        preprocessing happens on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Where downloaded graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-10-29"
        Version to retrieve. The available versions are:
        - 2021-11-10
        - 2021-06-02
        - 2021-10-29
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "FOVT", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def XPO(
    directed=False, preprocess="auto", load_nodes=True, load_node_types=True,
    load_edge_weights=True, auto_enable_tradeoffs=True,
    sort_tmp_dir=None, verbose=2, cache=True, cache_path=None,
    cache_sys_var="GRAPH_CACHE_DIR", version="2021-03-05", **kwargs
) -> Graph:
    """Retrieve and return the XPO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        preprocessing happens on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Where downloaded graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-03-05"
        Version to retrieve. The available versions are:
        - 2021-03-05
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "XPO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def ZFS(
    directed=False, preprocess="auto", load_nodes=True, load_node_types=True,
    load_edge_weights=True, auto_enable_tradeoffs=True,
    sort_tmp_dir=None, verbose=2, cache=True, cache_path=None,
    cache_sys_var="GRAPH_CACHE_DIR", version="2020-03-10", **kwargs
) -> Graph:
    """Retrieve and return the ZFS graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        preprocessing happens on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Where downloaded graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2020-03-10"
        Version to retrieve. The available versions are:
        - 2020-03-10
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "ZFS", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def RS(
    directed=False, preprocess="auto", load_nodes=True, load_node_types=True,
    load_edge_weights=True, auto_enable_tradeoffs=True,
    sort_tmp_dir=None, verbose=2, cache=True, cache_path=None,
    cache_sys_var="GRAPH_CACHE_DIR", version="6.107", **kwargs
) -> Graph:
    """Retrieve and return the RS graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        preprocessing happens on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Where downloaded graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "6.107"
        Version to retrieve. The available versions are:
        - 6.107
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "RS", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def CTO(
    directed=False, preprocess="auto", load_nodes=True, load_node_types=True,
    load_edge_weights=True, auto_enable_tradeoffs=True,
    sort_tmp_dir=None, verbose=2, cache=True, cache_path=None,
    cache_sys_var="GRAPH_CACHE_DIR", version="no_version", **kwargs
) -> Graph:
    """Retrieve and return the CTO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        preprocessing happens on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Where downloaded graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "no_version"
        Version to retrieve. The available versions are:
        - no_version
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "CTO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def OMO(
    directed=False, preprocess="auto", load_nodes=True, load_node_types=True,
    load_edge_weights=True, auto_enable_tradeoffs=True,
    sort_tmp_dir=None, verbose=2, cache=True, cache_path=None,
    cache_sys_var="GRAPH_CACHE_DIR", version="2020-06-08", **kwargs
) -> Graph:
    """Retrieve and return the OMO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        preprocessing happens on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Where downloaded graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2020-06-08"
        Version to retrieve. The available versions are:
        - 2022-04-27
        - 2020-06-08
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "OMO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def FIX(
    directed=False, preprocess="auto", load_nodes=True, load_node_types=True,
    load_edge_weights=True, auto_enable_tradeoffs=True,
    sort_tmp_dir=None, verbose=2, cache=True, cache_path=None,
    cache_sys_var="GRAPH_CACHE_DIR", version="2020-04-13", **kwargs
) -> Graph:
    """Retrieve and return the FIX graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        preprocessing happens on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Where downloaded graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2020-04-13"
        Version to retrieve. The available versions are:
        - 2020-04-13
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "FIX", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def MAMO(
    directed=False, preprocess="auto", load_nodes=True, load_node_types=True,
    load_edge_weights=True, auto_enable_tradeoffs=True,
    sort_tmp_dir=None, verbose=2, cache=True, cache_path=None,
    cache_sys_var="GRAPH_CACHE_DIR", version="no_version", **kwargs
) -> Graph:
    """Retrieve and return the MAMO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        preprocessing happens on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Where downloaded graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "no_version"
        Version to retrieve. The available versions are:
        - no_version
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "MAMO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def VTO(
    directed=False, preprocess="auto", load_nodes=True, load_node_types=True,
    load_edge_weights=True, auto_enable_tradeoffs=True,
    sort_tmp_dir=None, verbose=2, cache=True, cache_path=None,
    cache_sys_var="GRAPH_CACHE_DIR", version="2020-11-13", **kwargs
) -> Graph:
    """Retrieve and return the VTO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        preprocessing happens on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Where downloaded graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2020-11-13"
        Version to retrieve. The available versions are:
        - 2020-11-13
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "VTO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def UBERON(
    directed=False, preprocess="auto", load_nodes=True, load_node_types=True,
    load_edge_weights=True, auto_enable_tradeoffs=True,
    sort_tmp_dir=None, verbose=2, cache=True, cache_path=None,
    cache_sys_var="GRAPH_CACHE_DIR", version="2022-05-17", **kwargs
) -> Graph:
    """Retrieve and return the UBERON graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        preprocessing happens on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Where downloaded graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2022-05-17"
        Version to retrieve. The available versions are:
        - 2022-05-27
        - 2021-10-01
        - 2021-11-28
        - 2022-02-21
        - 2022-04-05
        - 2022-04-18
        - 2022-05-17
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "UBERON", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def MFOMD(
    directed=False, preprocess="auto", load_nodes=True, load_node_types=True,
    load_edge_weights=True, auto_enable_tradeoffs=True,
    sort_tmp_dir=None, verbose=2, cache=True, cache_path=None,
    cache_sys_var="GRAPH_CACHE_DIR", version="2020-04-26", **kwargs
) -> Graph:
    """Retrieve and return the MFOMD graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        preprocessing happens on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Where downloaded graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2020-04-26"
        Version to retrieve. The available versions are:
        - 2020-04-26
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "MFOMD", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def BFO(
    directed=False, preprocess="auto", load_nodes=True, load_node_types=True,
    load_edge_weights=True, auto_enable_tradeoffs=True,
    sort_tmp_dir=None, verbose=2, cache=True, cache_path=None,
    cache_sys_var="GRAPH_CACHE_DIR", version="2019-08-26", **kwargs
) -> Graph:
    """Retrieve and return the BFO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        preprocessing happens on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Where downloaded graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2019-08-26"
        Version to retrieve. The available versions are:
        - 2019-08-26
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "BFO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def HTN(
    directed=False, preprocess="auto", load_nodes=True, load_node_types=True,
    load_edge_weights=True, auto_enable_tradeoffs=True,
    sort_tmp_dir=None, verbose=2, cache=True, cache_path=None,
    cache_sys_var="GRAPH_CACHE_DIR", version="no_version", **kwargs
) -> Graph:
    """Retrieve and return the HTN graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        preprocessing happens on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Where downloaded graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "no_version"
        Version to retrieve. The available versions are:
        - no_version
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "HTN", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def PORO(
    directed=False, preprocess="auto", load_nodes=True, load_node_types=True,
    load_edge_weights=True, auto_enable_tradeoffs=True,
    sort_tmp_dir=None, verbose=2, cache=True, cache_path=None,
    cache_sys_var="GRAPH_CACHE_DIR", version="2016-09-13", **kwargs
) -> Graph:
    """Retrieve and return the PORO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        preprocessing happens on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Where downloaded graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2016-09-13"
        Version to retrieve. The available versions are:
        - 2016-10-06
        - 2016-09-13
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "PORO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def AISM(
    directed=False, preprocess="auto", load_nodes=True, load_node_types=True,
    load_edge_weights=True, auto_enable_tradeoffs=True,
    sort_tmp_dir=None, verbose=2, cache=True, cache_path=None,
    cache_sys_var="GRAPH_CACHE_DIR", version="2021-12-13", **kwargs
) -> Graph:
    """Retrieve and return the AISM graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        preprocessing happens on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Where downloaded graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-12-13"
        Version to retrieve. The available versions are:
        - 2022-03-17
        - 2021-09-08
        - 2021-11-19
        - 2021-12-13
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "AISM", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def WBBT(
    directed=False, preprocess="auto", load_nodes=True, load_node_types=True,
    load_edge_weights=True, auto_enable_tradeoffs=True,
    sort_tmp_dir=None, verbose=2, cache=True, cache_path=None,
    cache_sys_var="GRAPH_CACHE_DIR", version="2021-12-14", **kwargs
) -> Graph:
    """Retrieve and return the WBBT graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        preprocessing happens on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Where downloaded graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-12-14"
        Version to retrieve. The available versions are:
        - 2022-03-22
        - 2021-09-27
        - 2021-12-14
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "WBBT", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def HAO(
    directed=False, preprocess="auto", load_nodes=True, load_node_types=True,
    load_edge_weights=True, auto_enable_tradeoffs=True,
    sort_tmp_dir=None, verbose=2, cache=True, cache_path=None,
    cache_sys_var="GRAPH_CACHE_DIR", version="no_version", **kwargs
) -> Graph:
    """Retrieve and return the HAO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        preprocessing happens on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Where downloaded graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "no_version"
        Version to retrieve. The available versions are:
        - no_version
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "HAO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def SO(
    directed=False, preprocess="auto", load_nodes=True, load_node_types=True,
    load_edge_weights=True, auto_enable_tradeoffs=True,
    sort_tmp_dir=None, verbose=2, cache=True, cache_path=None,
    cache_sys_var="GRAPH_CACHE_DIR", version="2021-07-12", **kwargs
) -> Graph:
    """Retrieve and return the SO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        preprocessing happens on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Where downloaded graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-07-12"
        Version to retrieve. The available versions are:
        - 2021-11-22
        - 2021-07-12
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "SO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def RO(
    directed=False, preprocess="auto", load_nodes=True, load_node_types=True,
    load_edge_weights=True, auto_enable_tradeoffs=True,
    sort_tmp_dir=None, verbose=2, cache=True, cache_path=None,
    cache_sys_var="GRAPH_CACHE_DIR", version="2022-04-25", **kwargs
) -> Graph:
    """Retrieve and return the RO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        preprocessing happens on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Where downloaded graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2022-04-25"
        Version to retrieve. The available versions are:
        - 2022-05-23
        - 2021-08-31
        - 2021-10-27
        - 2021-12-06
        - 2022-01-20
        - 2022-02-07
        - 2022-04-25
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "RO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def MONDO(
    directed=False, preprocess="auto", load_nodes=True, load_node_types=True,
    load_edge_weights=True, auto_enable_tradeoffs=True,
    sort_tmp_dir=None, verbose=2, cache=True, cache_path=None,
    cache_sys_var="GRAPH_CACHE_DIR", version="2022-05-02", **kwargs
) -> Graph:
    """Retrieve and return the MONDO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        preprocessing happens on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Where downloaded graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2022-05-02"
        Version to retrieve. The available versions are:
        - 2022-06-01
        - 2021-09-01
        - 2021-10-01
        - 2021-11-01
        - 2021-12-01
        - 2021-12-30
        - 2022-02-04
        - 2022-03-01
        - 2022-04-04
        - 2022-05-02
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "MONDO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def DDPHENO(
    directed=False, preprocess="auto", load_nodes=True, load_node_types=True,
    load_edge_weights=True, auto_enable_tradeoffs=True,
    sort_tmp_dir=None, verbose=2, cache=True, cache_path=None,
    cache_sys_var="GRAPH_CACHE_DIR", version="2020-06-19", **kwargs
) -> Graph:
    """Retrieve and return the DDPHENO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        preprocessing happens on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Where downloaded graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2020-06-19"
        Version to retrieve. The available versions are:
        - 2022-01-19
        - 2020-06-19
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "DDPHENO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def IDOMAL(
    directed=False, preprocess="auto", load_nodes=True, load_node_types=True,
    load_edge_weights=True, auto_enable_tradeoffs=True,
    sort_tmp_dir=None, verbose=2, cache=True, cache_path=None,
    cache_sys_var="GRAPH_CACHE_DIR", version="2015-03-16", **kwargs
) -> Graph:
    """Retrieve and return the IDOMAL graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        preprocessing happens on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Where downloaded graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2015-03-16"
        Version to retrieve. The available versions are:
        - 2015-03-16
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "IDOMAL", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def MAXO(
    directed=False, preprocess="auto", load_nodes=True, load_node_types=True,
    load_edge_weights=True, auto_enable_tradeoffs=True,
    sort_tmp_dir=None, verbose=2, cache=True, cache_path=None,
    cache_sys_var="GRAPH_CACHE_DIR", version="2022-05-06", **kwargs
) -> Graph:
    """Retrieve and return the MAXO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        preprocessing happens on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Where downloaded graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2022-05-06"
        Version to retrieve. The available versions are:
        - 2022-06-01
        - 2021-08-19
        - 2022-03-23
        - 2022-04-08
        - 2022-04-11
        - 2022-05-06
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "MAXO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def FBCV(
    directed=False, preprocess="auto", load_nodes=True, load_node_types=True,
    load_edge_weights=True, auto_enable_tradeoffs=True,
    sort_tmp_dir=None, verbose=2, cache=True, cache_path=None,
    cache_sys_var="GRAPH_CACHE_DIR", version="2022-02-24", **kwargs
) -> Graph:
    """Retrieve and return the FBCV graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        preprocessing happens on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Where downloaded graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2022-02-24"
        Version to retrieve. The available versions are:
        - 2022-04-14
        - 2021-09-02
        - 2021-10-18
        - 2021-12-13
        - 2022-01-24
        - 2022-02-24
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "FBCV", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def TRANS(
    directed=False, preprocess="auto", load_nodes=True, load_node_types=True,
    load_edge_weights=True, auto_enable_tradeoffs=True,
    sort_tmp_dir=None, verbose=2, cache=True, cache_path=None,
    cache_sys_var="GRAPH_CACHE_DIR", version="2020-08-04", **kwargs
) -> Graph:
    """Retrieve and return the TRANS graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        preprocessing happens on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Where downloaded graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2020-08-04"
        Version to retrieve. The available versions are:
        - 2020-08-04
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "TRANS", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def PSDO(
    directed=False, preprocess="auto", load_nodes=True, load_node_types=True,
    load_edge_weights=True, auto_enable_tradeoffs=True,
    sort_tmp_dir=None, verbose=2, cache=True, cache_path=None,
    cache_sys_var="GRAPH_CACHE_DIR", version="2020-12-04", **kwargs
) -> Graph:
    """Retrieve and return the PSDO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        preprocessing happens on Linux/macOS but not on Windows.
    load_nodes = True
        Load node names, or fall back to a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Where downloaded graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2020-12-04"
        Version to retrieve. The available versions are:
        - 2020-12-04
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "PSDO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def SCDO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-04-15", **kwargs
) -> Graph:
    """Retrieve the SCDO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        applied on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names rather than a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-04-15"
        Version to retrieve. Available versions: 2021-04-15.
    """
    retriever = AutomaticallyRetrievedGraph(
        "SCDO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def LEPAO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-11-20", **kwargs
) -> Graph:
    """Retrieve the LEPAO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        applied on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names rather than a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-11-20"
        Version to retrieve. Available versions: 2021-11-20.
    """
    retriever = AutomaticallyRetrievedGraph(
        "LEPAO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def ONTONEO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-04-29", **kwargs
) -> Graph:
    """Retrieve the ONTONEO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        applied on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names rather than a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-04-29"
        Version to retrieve. Available versions: 2021-04-29.
    """
    retriever = AutomaticallyRetrievedGraph(
        "ONTONEO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def DRON(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2022-01-28", **kwargs
) -> Graph:
    """Retrieve the DRON graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        applied on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names rather than a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2022-01-28"
        Version to retrieve. Available versions:
        2022-04-22, 2021-08-12, 2021-10-20, 2022-01-28.
    """
    retriever = AutomaticallyRetrievedGraph(
        "DRON", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def RBO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2022-04-16", **kwargs
) -> Graph:
    """Retrieve the RBO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        applied on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names rather than a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2022-04-16"
        Version to retrieve. Available versions:
        2022-06-02, 2021-08-30, 2022-01-14, 2022-04-16.
    """
    retriever = AutomaticallyRetrievedGraph(
        "RBO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def NCIT(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-12-17", **kwargs
) -> Graph:
    """Retrieve the NCIT graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        applied on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names rather than a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-12-17"
        Version to retrieve. Available versions:
        2022-04-14, 2021-08-20, 2021-12-17.
    """
    retriever = AutomaticallyRetrievedGraph(
        "NCIT", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def FMA(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2020-04-13", **kwargs
) -> Graph:
    """Retrieve the FMA graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        applied on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names rather than a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2020-04-13"
        Version to retrieve. Available versions: 2020-04-13.
    """
    retriever = AutomaticallyRetrievedGraph(
        "FMA", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def REX(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2017-11-19", **kwargs
) -> Graph:
    """Retrieve the REX graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        applied on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names rather than a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2017-11-19"
        Version to retrieve. Available versions: 2017-11-19.
    """
    retriever = AutomaticallyRetrievedGraph(
        "REX", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def COB(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-09-13", **kwargs
) -> Graph:
    """Retrieve the COB graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        applied on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names rather than a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-09-13"
        Version to retrieve. Available versions:
        2022-05-02, 2021-09-13.
    """
    retriever = AutomaticallyRetrievedGraph(
        "COB", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def SIBO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2015-06-15", **kwargs
) -> Graph:
    """Retrieve the SIBO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        applied on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names rather than a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2015-06-15"
        Version to retrieve. Available versions: 2015-06-15.
    """
    retriever = AutomaticallyRetrievedGraph(
        "SIBO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def PDRO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-06-08", **kwargs
) -> Graph:
    """Retrieve the PDRO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        applied on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names rather than a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-06-08"
        Version to retrieve. Available versions: 2021-06-08.
    """
    retriever = AutomaticallyRetrievedGraph(
        "PDRO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def OGG(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "12-01-2016", **kwargs
) -> Graph:
    """Retrieve the OGG graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        applied on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names rather than a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "12-01-2016"
        Version to retrieve. Available versions: 12-01-2016.
    """
    retriever = AutomaticallyRetrievedGraph(
        "OGG", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def XLMOD(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "no_version", **kwargs
) -> Graph:
    """Retrieve the XLMOD graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        applied on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names rather than a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "no_version"
        Version to retrieve. Available versions:
        2019-10-28, no_version.
    """
    retriever = AutomaticallyRetrievedGraph(
        "XLMOD", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def HANCESTRO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2020-12-18", **kwargs
) -> Graph:
    """Retrieve the HANCESTRO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        applied on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names rather than a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2020-12-18"
        Version to retrieve. Available versions:
        2022-05-12, 2020-12-18.
    """
    retriever = AutomaticallyRetrievedGraph(
        "HANCESTRO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def GO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2022-03-22", **kwargs
) -> Graph:
    """Retrieve the GO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        applied on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names rather than a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2022-03-22"
        Version to retrieve. Available versions:
        2022-05-16, 2021-09-01, 2021-10-26, 2021-11-16,
        2021-12-15, 2022-01-13, 2022-03-10, 2022-03-22.
    """
    retriever = AutomaticallyRetrievedGraph(
        "GO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def MF(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-09-21", **kwargs
) -> Graph:
    """Retrieve the MF graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        applied on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names rather than a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-09-21"
        Version to retrieve. Available versions:
        2021-11-17, 2021-09-21.
    """
    retriever = AutomaticallyRetrievedGraph(
        "MF", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def GSSO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2.0.5", **kwargs
) -> Graph:
    """Retrieve the GSSO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        applied on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names rather than a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2.0.5"
        Version to retrieve. Available versions: 2.0.5.
    """
    retriever = AutomaticallyRetrievedGraph(
        "GSSO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def UPHENO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "no_version", **kwargs
) -> Graph:
    """Retrieve the UPHENO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        applied on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names rather than a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "no_version"
        Version to retrieve. Available versions: no_version.
    """
    retriever = AutomaticallyRetrievedGraph(
        "UPHENO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def PLANA(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-10-21", **kwargs
) -> Graph:
    """Retrieve the PLANA graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        applied on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names rather than a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-10-21"
        Version to retrieve. Available versions:
        releases, 2021-09-29, 2021-10-06, 2021-10-21.
    """
    retriever = AutomaticallyRetrievedGraph(
        "PLANA", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def OAE(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "1.2.44", **kwargs
) -> Graph:
    """Retrieve the OAE graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        applied on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names rather than a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "1.2.44"
        Version to retrieve. Available versions: 1.2.44.
    """
    retriever = AutomaticallyRetrievedGraph(
        "OAE", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def MMUSDV(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2020-03-10", **kwargs
) -> Graph:
    """Retrieve the MMUSDV graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        applied on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names rather than a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2020-03-10"
        Version to retrieve. Available versions: 2020-03-10.
    """
    retriever = AutomaticallyRetrievedGraph(
        "MMUSDV", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def MS(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "4.1.88", **kwargs
) -> Graph:
    """Retrieve the MS graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        applied on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names rather than a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "4.1.88"
        Version to retrieve. Available versions:
        4.1.89, 4.1.35, 4.1.62, 4.1.64, 4.1.65, 4.1.67, 4.1.69,
        4.1.70, 4.1.71, 4.1.78, 4.1.82, 4.1.83, 4.1.84, 4.1.86,
        4.1.88.
    """
    retriever = AutomaticallyRetrievedGraph(
        "MS", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def APOLLO_SV(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "v4.1.1.", **kwargs
) -> Graph:
    """Retrieve the APOLLO_SV graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        applied on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names rather than a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "v4.1.1."
        Version to retrieve. Available versions: v4.1.1.
    """
    retriever = AutomaticallyRetrievedGraph(
        "APOLLO_SV", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def HSAPDV(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2020-03-10", **kwargs
) -> Graph:
    """Retrieve the HSAPDV graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        applied on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names rather than a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2020-03-10"
        Version to retrieve. Available versions: 2020-03-10.
    """
    retriever = AutomaticallyRetrievedGraph(
        "HSAPDV", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def VO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "September_18__2021", **kwargs
) -> Graph:
    """Retrieve the VO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        applied on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names rather than a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "September_18__2021"
        Version to retrieve. Available versions:
        March-19--2022, September_18__2021.
    """
    retriever = AutomaticallyRetrievedGraph(
        "VO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def MIRO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2014-05-14", **kwargs
) -> Graph:
    """Retrieve the MIRO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        applied on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names rather than a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2014-05-14"
        Version to retrieve. Available versions: 2014-05-14.
    """
    retriever = AutomaticallyRetrievedGraph(
        "MIRO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def EMAPA(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-09-01", **kwargs
) -> Graph:
    """Retrieve the EMAPA graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        applied on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names rather than a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-09-01"
        Version to retrieve. Available versions: 2021-09-01.
    """
    retriever = AutomaticallyRetrievedGraph(
        "EMAPA", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def GECKO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-01-18", **kwargs
) -> Graph:
    """Retrieve the GECKO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        applied on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names rather than a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-01-18"
        Version to retrieve. Available versions: 2021-01-18.
    """
    retriever = AutomaticallyRetrievedGraph(
        "GECKO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def CARO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2022-02-18", **kwargs
) -> Graph:
    """Retrieve the CARO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        applied on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names rather than a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2022-02-18"
        Version to retrieve. Available versions: 2022-02-18.
    """
    retriever = AutomaticallyRetrievedGraph(
        "CARO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def GENEPIO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2022-01-26", **kwargs
) -> Graph:
    """Retrieve the GENEPIO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        applied on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names rather than a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2022-01-26"
        Version to retrieve. Available versions:
        2022-02-06, 2021-05-24, 2022-01-26.
    """
    retriever = AutomaticallyRetrievedGraph(
        "GENEPIO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def TADS(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2015-08-20", **kwargs
) -> Graph:
    """Retrieve the TADS graph from the kgobo repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time and memory peak;
        applied on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names rather than a numeric range.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` system variable or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2015-08-20"
        Version to retrieve. Available versions: 2015-08-20.
    """
    retriever = AutomaticallyRetrievedGraph(
        "TADS", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def FAO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2020-05-07", **kwargs
) -> Graph:
    """Retrieve and return the FAO graph from the KG-OBO repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Only performed on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names, otherwise a numeric range is used.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored.
        Falls back to the `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2020-05-07"
        Version to retrieve. The available versions are:
        - 2020-05-07
    """
    retriever = AutomaticallyRetrievedGraph(
        "FAO", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def CVDO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2020-03-05", **kwargs
) -> Graph:
    """Retrieve and return the CVDO graph from the KG-OBO repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Only performed on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names, otherwise a numeric range is used.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored.
        Falls back to the `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2020-03-05"
        Version to retrieve. The available versions are:
        - 2020-03-05
    """
    retriever = AutomaticallyRetrievedGraph(
        "CVDO", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def ECAO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2020-05-22", **kwargs
) -> Graph:
    """Retrieve and return the ECAO graph from the KG-OBO repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Only performed on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names, otherwise a numeric range is used.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored.
        Falls back to the `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2020-05-22"
        Version to retrieve. The available versions are:
        - 2020-05-22
    """
    retriever = AutomaticallyRetrievedGraph(
        "ECAO", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def OHPI(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "releases", **kwargs
) -> Graph:
    """Retrieve and return the OHPI graph from the KG-OBO repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Only performed on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names, otherwise a numeric range is used.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored.
        Falls back to the `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "releases"
        Version to retrieve. The available versions are:
        - releases
    """
    retriever = AutomaticallyRetrievedGraph(
        "OHPI", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def OPL(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-01-28", **kwargs
) -> Graph:
    """Retrieve and return the OPL graph from the KG-OBO repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Only performed on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names, otherwise a numeric range is used.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored.
        Falls back to the `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-01-28"
        Version to retrieve. The available versions are:
        - 2021-01-28
    """
    retriever = AutomaticallyRetrievedGraph(
        "OPL", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def TGMA(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2013-06-03", **kwargs
) -> Graph:
    """Retrieve and return the TGMA graph from the KG-OBO repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Only performed on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names, otherwise a numeric range is used.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored.
        Falls back to the `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2013-06-03"
        Version to retrieve. The available versions are:
        - 2013-06-03
    """
    retriever = AutomaticallyRetrievedGraph(
        "TGMA", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def BCO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2020-03-27", **kwargs
) -> Graph:
    """Retrieve and return the BCO graph from the KG-OBO repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Only performed on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names, otherwise a numeric range is used.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored.
        Falls back to the `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2020-03-27"
        Version to retrieve. The available versions are:
        - 2021-11-14
        - 2020-03-27
    """
    retriever = AutomaticallyRetrievedGraph(
        "BCO", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def ICO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-04-21", **kwargs
) -> Graph:
    """Retrieve and return the ICO graph from the KG-OBO repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Only performed on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names, otherwise a numeric range is used.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored.
        Falls back to the `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-04-21"
        Version to retrieve. The available versions are:
        - 2021-04-21
    """
    retriever = AutomaticallyRetrievedGraph(
        "ICO", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def ZECO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-06-04", **kwargs
) -> Graph:
    """Retrieve and return the ZECO graph from the KG-OBO repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Only performed on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names, otherwise a numeric range is used.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored.
        Falls back to the `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-06-04"
        Version to retrieve. The available versions are:
        - 2022-02-14
        - 2021-06-04
    """
    retriever = AutomaticallyRetrievedGraph(
        "ZECO", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def PHIPO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-07-14", **kwargs
) -> Graph:
    """Retrieve and return the PHIPO graph from the KG-OBO repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Only performed on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names, otherwise a numeric range is used.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored.
        Falls back to the `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-07-14"
        Version to retrieve. The available versions are:
        - 2021-07-14
    """
    retriever = AutomaticallyRetrievedGraph(
        "PHIPO", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def PDUMDV(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2020-03-10", **kwargs
) -> Graph:
    """Retrieve and return the PDUMDV graph from the KG-OBO repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Only performed on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names, otherwise a numeric range is used.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored.
        Falls back to the `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2020-03-10"
        Version to retrieve. The available versions are:
        - 2020-03-10
    """
    retriever = AutomaticallyRetrievedGraph(
        "PDUMDV", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def ARO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "05-10-2021-09-37", **kwargs
) -> Graph:
    """Retrieve and return the ARO graph from the KG-OBO repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Only performed on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names, otherwise a numeric range is used.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored.
        Falls back to the `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "05-10-2021-09-37"
        Version to retrieve. The available versions are:
        - 05-10-2021-09-37
    """
    retriever = AutomaticallyRetrievedGraph(
        "ARO", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def OARCS(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "no_version", **kwargs
) -> Graph:
    """Retrieve and return the OARCS graph from the KG-OBO repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Only performed on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names, otherwise a numeric range is used.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored.
        Falls back to the `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "no_version"
        Version to retrieve. The available versions are:
        - no_version
    """
    retriever = AutomaticallyRetrievedGraph(
        "OARCS", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def PCL(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2022-03-02", **kwargs
) -> Graph:
    """Retrieve and return the PCL graph from the KG-OBO repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Only performed on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names, otherwise a numeric range is used.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored.
        Falls back to the `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2022-03-02"
        Version to retrieve. The available versions are:
        - 2022-04-27
        - 2022-01-24
        - 2022-02-02
        - 2022-02-09
        - 2022-03-02
    """
    retriever = AutomaticallyRetrievedGraph(
        "PCL", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def CTENO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2016-10-19", **kwargs
) -> Graph:
    """Retrieve and return the CTENO graph from the KG-OBO repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Only performed on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names, otherwise a numeric range is used.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored.
        Falls back to the `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2016-10-19"
        Version to retrieve. The available versions are:
        - 2016-10-19
    """
    retriever = AutomaticallyRetrievedGraph(
        "CTENO", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def PLANP(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2020-03-28", **kwargs
) -> Graph:
    """Retrieve and return the PLANP graph from the KG-OBO repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Only performed on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names, otherwise a numeric range is used.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored.
        Falls back to the `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2020-03-28"
        Version to retrieve. The available versions are:
        - 2020-03-28
    """
    retriever = AutomaticallyRetrievedGraph(
        "PLANP", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def DOID(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2022-04-01", **kwargs
) -> Graph:
    """Retrieve and return the DOID graph from the KG-OBO repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Only performed on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names, otherwise a numeric range is used.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored.
        Falls back to the `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2022-04-01"
        Version to retrieve. The available versions are:
        - 2022-04-28
        - 2021-10-01
        - 2021-10-12
        - 2021-11-17
        - 2021-12-15
        - 2022-01-31
        - 2022-02-21
        - 2022-03-02
        - 2022-04-01
    """
    retriever = AutomaticallyRetrievedGraph(
        "DOID", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def OMRSE(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-08-30", **kwargs
) -> Graph:
    """Retrieve and return the OMRSE graph from the KG-OBO repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Only performed on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names, otherwise a numeric range is used.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored.
        Falls back to the `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-08-30"
        Version to retrieve. The available versions are:
        - 2022-04-06
        - 2021-08-30
    """
    retriever = AutomaticallyRetrievedGraph(
        "OMRSE", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def PPO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2018-10-26", **kwargs
) -> Graph:
    """Retrieve and return the PPO graph from the KG-OBO repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Only performed on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names, otherwise a numeric range is used.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored.
        Falls back to the `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2018-10-26"
        Version to retrieve. The available versions are:
        - 2018-10-26
    """
    retriever = AutomaticallyRetrievedGraph(
        "PPO", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def OVAE(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "04-11-2016", **kwargs
) -> Graph:
    """Retrieve and return the OVAE graph from the KG-OBO repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Only performed on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names, otherwise a numeric range is used.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored.
        Falls back to the `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "04-11-2016"
        Version to retrieve. The available versions are:
        - 04-11-2016
    """
    retriever = AutomaticallyRetrievedGraph(
        "OVAE", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def ZP(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-12-12", **kwargs
) -> Graph:
    """Retrieve and return the ZP graph from the KG-OBO repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Only performed on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names, otherwise a numeric range is used.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored.
        Falls back to the `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-12-12"
        Version to retrieve. The available versions are:
        - 2021-12-12
    """
    retriever = AutomaticallyRetrievedGraph(
        "ZP", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def STATO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "RC1.4", **kwargs
) -> Graph:
    """Retrieve and return the STATO graph from the KG-OBO repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Only performed on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names, otherwise a numeric range is used.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored.
        Falls back to the `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "RC1.4"
        Version to retrieve. The available versions are:
        - RC1.4
    """
    retriever = AutomaticallyRetrievedGraph(
        "STATO", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def ONE(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "no_version", **kwargs
) -> Graph:
    """Retrieve and return the ONE graph from the KG-OBO repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Only performed on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names, otherwise a numeric range is used.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored.
        Falls back to the `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "no_version"
        Version to retrieve. The available versions are:
        - no_version
    """
    retriever = AutomaticallyRetrievedGraph(
        "ONE", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def ECTO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2022-05-04", **kwargs
) -> Graph:
    """Retrieve and return the ECTO graph from the KG-OBO repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Only performed on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names, otherwise a numeric range is used.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored.
        Falls back to the `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2022-05-04"
        Version to retrieve. The available versions are:
        - 2022-05-12
        - 2021-08-25
        - 2022-03-09
        - 2022-05-04
    """
    retriever = AutomaticallyRetrievedGraph(
        "ECTO", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def XAO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-03-04", **kwargs
) -> Graph:
    """Retrieve and return the XAO graph from the KG-OBO repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Only performed on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names, otherwise a numeric range is used.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored.
        Falls back to the `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-03-04"
        Version to retrieve. The available versions are:
        - 2021-03-04
    """
    retriever = AutomaticallyRetrievedGraph(
        "XAO", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def MIAPA(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "no_version", **kwargs
) -> Graph:
    """Retrieve and return the MIAPA graph from the KG-OBO repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Only performed on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names, otherwise a numeric range is used.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored.
        Falls back to the `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "no_version"
        Version to retrieve. The available versions are:
        - no_version
    """
    retriever = AutomaticallyRetrievedGraph(
        "MIAPA", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def MI(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2020-04-13", **kwargs
) -> Graph:
    """Retrieve and return the MI graph from the KG-OBO repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Only performed on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names, otherwise a numeric range is used.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored.
        Falls back to the `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2020-04-13"
        Version to retrieve. The available versions are:
        - 2020-04-13
    """
    retriever = AutomaticallyRetrievedGraph(
        "MI", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def ECOCORE(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-02-17", **kwargs
) -> Graph:
    """Retrieve and return the ECOCORE graph from the KG-OBO repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Only performed on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names, otherwise a numeric range is used.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored.
        Falls back to the `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-02-17"
        Version to retrieve. The available versions are:
        - 2022-03-09
        - 2021-02-17
    """
    retriever = AutomaticallyRetrievedGraph(
        "ECOCORE", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def MMO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2.39", **kwargs
) -> Graph:
    """Retrieve and return the MMO graph from the KG-OBO repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Only performed on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names, otherwise a numeric range is used.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored.
        Falls back to the `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2.39"
        Version to retrieve. The available versions are:
        - 2.39
    """
    retriever = AutomaticallyRetrievedGraph(
        "MMO", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def EUPATH(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-09-09", **kwargs
) -> Graph:
    """Retrieve and return the EUPATH graph from the KG-OBO repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Only performed on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names, otherwise a numeric range is used.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored.
        Falls back to the `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-09-09"
        Version to retrieve. The available versions are:
        - 2022-02-15
        - 2021-09-09
    """
    retriever = AutomaticallyRetrievedGraph(
        "EUPATH", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def OBIB(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-02-02", **kwargs
) -> Graph:
    """Retrieve and return the OBIB graph from the KG-OBO repository.

    Parameters
    ----------
    directed = False
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Only performed on Linux/macOS, skipped on Windows.
    load_nodes = True
        Load node names, otherwise a numeric range is used.
    auto_enable_tradeoffs = True
        Enabled when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored.
        Falls back to the `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-02-02"
        Version to retrieve. The available versions are:
        - 2021-11-12
        - 2021-02-02
    """
    retriever = AutomaticallyRetrievedGraph(
        "OBIB", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def IDO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2017-11-03", **kwargs
) -> Graph:
    """Retrieve and return the IDO graph (kgobo repository).

    Parameters
    ----------
    directed = False
        Load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Will preprocess in Linux/macOS but not Windows.
    load_nodes = True
        Load node names or use numeric range.
    auto_enable_tradeoffs = True
        Enable when graph has < 50M edges.
    cache_path = None
        Path to store graphs.
        Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2017-11-03"
        Version to retrieve.
        The available versions are:
        - 2017-11-03
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "IDO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def SEPIO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "no_version", **kwargs
) -> Graph:
    """Retrieve and return the SEPIO graph (kgobo repository).

    Parameters
    ----------
    directed = False
        Load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Will preprocess in Linux/macOS but not Windows.
    load_nodes = True
        Load node names or use numeric range.
    auto_enable_tradeoffs = True
        Enable when graph has < 50M edges.
    cache_path = None
        Path to store graphs.
        Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "no_version"
        Version to retrieve.
        The available versions are:
        - no_version
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "SEPIO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def TTO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "19-07-2012-13-26", **kwargs
) -> Graph:
    """Retrieve and return the TTO graph (kgobo repository).

    Parameters
    ----------
    directed = False
        Load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Will preprocess in Linux/macOS but not Windows.
    load_nodes = True
        Load node names or use numeric range.
    auto_enable_tradeoffs = True
        Enable when graph has < 50M edges.
    cache_path = None
        Path to store graphs.
        Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "19-07-2012-13-26"
        Version to retrieve.
        The available versions are:
        - 19-07-2012-13-26
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "TTO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def PR(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "65.0", **kwargs
) -> Graph:
    """Retrieve and return the PR graph (kgobo repository).

    Parameters
    ----------
    directed = False
        Load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Will preprocess in Linux/macOS but not Windows.
    load_nodes = True
        Load node names or use numeric range.
    auto_enable_tradeoffs = True
        Enable when graph has < 50M edges.
    cache_path = None
        Path to store graphs.
        Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "65.0"
        Version to retrieve.
        The available versions are:
        - 66.0
        - 63.0
        - 64.0
        - 65.0
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "PR", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def NBO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-02-15", **kwargs
) -> Graph:
    """Retrieve and return the NBO graph (kgobo repository).

    Parameters
    ----------
    directed = False
        Load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Will preprocess in Linux/macOS but not Windows.
    load_nodes = True
        Load node names or use numeric range.
    auto_enable_tradeoffs = True
        Enable when graph has < 50M edges.
    cache_path = None
        Path to store graphs.
        Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-02-15"
        Version to retrieve.
        The available versions are:
        - 2021-02-15
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "NBO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def WBPHENOTYPE(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-12-20", **kwargs
) -> Graph:
    """Retrieve and return the WBPHENOTYPE graph (kgobo repository).

    Parameters
    ----------
    directed = False
        Load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Will preprocess in Linux/macOS but not Windows.
    load_nodes = True
        Load node names or use numeric range.
    auto_enable_tradeoffs = True
        Enable when graph has < 50M edges.
    cache_path = None
        Path to store graphs.
        Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-12-20"
        Version to retrieve.
        The available versions are:
        - 2022-03-22
        - 2021-09-27
        - 2021-10-25
        - 2021-12-12
        - 2021-12-20
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "WBPHENOTYPE", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def PECO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2020-08-21", **kwargs
) -> Graph:
    """Retrieve and return the PECO graph (kgobo repository).

    Parameters
    ----------
    directed = False
        Load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Will preprocess in Linux/macOS but not Windows.
    load_nodes = True
        Load node names or use numeric range.
    auto_enable_tradeoffs = True
        Enable when graph has < 50M edges.
    cache_path = None
        Path to store graphs.
        Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2020-08-21"
        Version to retrieve.
        The available versions are:
        - 2020-08-21
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "PECO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def GAZ(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "no_version", **kwargs
) -> Graph:
    """Retrieve and return the GAZ graph (kgobo repository).

    Parameters
    ----------
    directed = False
        Load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Will preprocess in Linux/macOS but not Windows.
    load_nodes = True
        Load node names or use numeric range.
    auto_enable_tradeoffs = True
        Enable when graph has < 50M edges.
    cache_path = None
        Path to store graphs.
        Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "no_version"
        Version to retrieve.
        The available versions are:
        - no_version
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "GAZ", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def CIO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2015-03-10", **kwargs
) -> Graph:
    """Retrieve and return the CIO graph (kgobo repository).

    Parameters
    ----------
    directed = False
        Load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Will preprocess in Linux/macOS but not Windows.
    load_nodes = True
        Load node names or use numeric range.
    auto_enable_tradeoffs = True
        Enable when graph has < 50M edges.
    cache_path = None
        Path to store graphs.
        Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2015-03-10"
        Version to retrieve.
        The available versions are:
        - 2015-03-10
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "CIO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def INO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "1.0.112", **kwargs
) -> Graph:
    """Retrieve and return the INO graph (kgobo repository).

    Parameters
    ----------
    directed = False
        Load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Will preprocess in Linux/macOS but not Windows.
    load_nodes = True
        Load node names or use numeric range.
    auto_enable_tradeoffs = True
        Enable when graph has < 50M edges.
    cache_path = None
        Path to store graphs.
        Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "1.0.112"
        Version to retrieve.
        The available versions are:
        - 1.0.112
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "INO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def CLAO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-09-27", **kwargs
) -> Graph:
    """Retrieve and return the CLAO graph (kgobo repository).

    Parameters
    ----------
    directed = False
        Load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Will preprocess in Linux/macOS but not Windows.
    load_nodes = True
        Load node names or use numeric range.
    auto_enable_tradeoffs = True
        Enable when graph has < 50M edges.
    cache_path = None
        Path to store graphs.
        Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-09-27"
        Version to retrieve.
        The available versions are:
        - 2021-09-27
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "CLAO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def UPA(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2018-12-12", **kwargs
) -> Graph:
    """Retrieve and return the UPA graph (kgobo repository).

    Parameters
    ----------
    directed = False
        Load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Will preprocess in Linux/macOS but not Windows.
    load_nodes = True
        Load node names or use numeric range.
    auto_enable_tradeoffs = True
        Enable when graph has < 50M edges.
    cache_path = None
        Path to store graphs.
        Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2018-12-12"
        Version to retrieve.
        The available versions are:
        - 2018-12-12
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "UPA", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def NOMEN(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "no_version", **kwargs
) -> Graph:
    """Retrieve and return the NOMEN graph (kgobo repository).

    Parameters
    ----------
    directed = False
        Load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Will preprocess in Linux/macOS but not Windows.
    load_nodes = True
        Load node names or use numeric range.
    auto_enable_tradeoffs = True
        Enable when graph has < 50M edges.
    cache_path = None
        Path to store graphs.
        Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "no_version"
        Version to retrieve.
        The available versions are:
        - no_version
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "NOMEN", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def ZFA(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2022-02-28", **kwargs
) -> Graph:
    """Retrieve and return the ZFA graph (kgobo repository).

    Parameters
    ----------
    directed = False
        Load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Will preprocess in Linux/macOS but not Windows.
    load_nodes = True
        Load node names or use numeric range.
    auto_enable_tradeoffs = True
        Enable when graph has < 50M edges.
    cache_path = None
        Path to store graphs.
        Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2022-02-28"
        Version to retrieve.
        The available versions are:
        - 2022-03-15
        - 2020-04-14
        - 2021-12-09
        - 2022-02-15
        - 2022-02-28
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "ZFA", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def DISDRIV(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "no_version", **kwargs
) -> Graph:
    """Retrieve and return the DISDRIV graph (kgobo repository).

    Parameters
    ----------
    directed = False
        Load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Will preprocess in Linux/macOS but not Windows.
    load_nodes = True
        Load node names or use numeric range.
    auto_enable_tradeoffs = True
        Enable when graph has < 50M edges.
    cache_path = None
        Path to store graphs.
        Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "no_version"
        Version to retrieve.
        The available versions are:
        - no_version
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "DISDRIV", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def CIDO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "09-03-2021", **kwargs
) -> Graph:
    """Retrieve and return the CIDO graph (kgobo repository).

    Parameters
    ----------
    directed = False
        Load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Will preprocess in Linux/macOS but not Windows.
    load_nodes = True
        Load node names or use numeric range.
    auto_enable_tradeoffs = True
        Enable when graph has < 50M edges.
    cache_path = None
        Path to store graphs.
        Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "09-03-2021"
        Version to retrieve.
        The available versions are:
        - 09-03-2021
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "CIDO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def COLAO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-12-14", **kwargs
) -> Graph:
    """Retrieve and return the COLAO graph (kgobo repository).

    Parameters
    ----------
    directed = False
        Load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Will preprocess in Linux/macOS but not Windows.
    load_nodes = True
        Load node names or use numeric range.
    auto_enable_tradeoffs = True
        Enable when graph has < 50M edges.
    cache_path = None
        Path to store graphs.
        Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-12-14"
        Version to retrieve.
        The available versions are:
        - 2021-12-14
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "COLAO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def KISAO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2.30", **kwargs
) -> Graph:
    """Retrieve and return the KISAO graph (kgobo repository).

    Parameters
    ----------
    directed = False
        Load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Will preprocess in Linux/macOS but not Windows.
    load_nodes = True
        Load node names or use numeric range.
    auto_enable_tradeoffs = True
        Enable when graph has < 50M edges.
    cache_path = None
        Path to store graphs.
        Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2.30"
        Version to retrieve.
        The available versions are:
        - 2.30
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "KISAO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def MA(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2017-02-07", **kwargs
) -> Graph:
    """Retrieve and return the MA graph (kgobo repository).

    Parameters
    ----------
    directed = False
        Load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Will preprocess in Linux/macOS but not Windows.
    load_nodes = True
        Load node names or use numeric range.
    auto_enable_tradeoffs = True
        Enable when graph has < 50M edges.
    cache_path = None
        Path to store graphs.
        Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2017-02-07"
        Version to retrieve.
        The available versions are:
        - 2017-02-07
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "MA", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def PO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-08-13", **kwargs
) -> Graph:
    """Retrieve and return the PO graph (kgobo repository).

    Parameters
    ----------
    directed = False
        Load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Will preprocess in Linux/macOS but not Windows.
    load_nodes = True
        Load node names or use numeric range.
    auto_enable_tradeoffs = True
        Enable when graph has < 50M edges.
    cache_path = None
        Path to store graphs.
        Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-08-13"
        Version to retrieve.
        The available versions are:
        - 2021-08-13
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "PO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def CDNO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2022-01-10", **kwargs
) -> Graph:
    """Retrieve and return the CDNO graph (kgobo repository).

    Parameters
    ----------
    directed = False
        Load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Will preprocess in Linux/macOS but not Windows.
    load_nodes = True
        Load node names or use numeric range.
    auto_enable_tradeoffs = True
        Enable when graph has < 50M edges.
    cache_path = None
        Path to store graphs.
        Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2022-01-10"
        Version to retrieve.
        The available versions are:
        - 2022-04-06
        - 2021-10-20
        - 2022-01-10
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "CDNO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def ONS(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "1.2.2", **kwargs
) -> Graph:
    """Retrieve and return the ONS graph (kgobo repository).

    Parameters
    ----------
    directed = False
        Load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Will preprocess in Linux/macOS but not Windows.
    load_nodes = True
        Load node names or use numeric range.
    auto_enable_tradeoffs = True
        Enable when graph has < 50M edges.
    cache_path = None
        Path to store graphs.
        Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "1.2.2"
        Version to retrieve.
        The available versions are:
        - no_version
        - 1.2.2
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "ONS", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def OHD(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2016-06-27", **kwargs
) -> Graph:
    """Retrieve and return the OHD graph (kgobo repository).

    Parameters
    ----------
    directed = False
        Load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Will preprocess in Linux/macOS but not Windows.
    load_nodes = True
        Load node names or use numeric range.
    auto_enable_tradeoffs = True
        Enable when graph has < 50M edges.
    cache_path = None
        Path to store graphs.
        Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2016-06-27"
        Version to retrieve.
        The available versions are:
        - 2016-06-27
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "OHD", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def VARIO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "no_version", **kwargs
) -> Graph:
    """Retrieve and return the VARIO graph (kgobo repository).

    Parameters
    ----------
    directed = False
        Load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Will preprocess in Linux/macOS but not Windows.
    load_nodes = True
        Load node names or use numeric range.
    auto_enable_tradeoffs = True
        Enable when graph has < 50M edges.
    cache_path = None
        Path to store graphs.
        Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "no_version"
        Version to retrieve.
        The available versions are:
        - no_version
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "VARIO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def AGRO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-07-01", **kwargs
) -> Graph:
    """Retrieve and return the AGRO graph (kgobo repository).

    Parameters
    ----------
    directed = False
        Load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Will preprocess in Linux/macOS but not Windows.
    load_nodes = True
        Load node names or use numeric range.
    auto_enable_tradeoffs = True
        Enable when graph has < 50M edges.
    cache_path = None
        Path to store graphs.
        Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-07-01"
        Version to retrieve.
        The available versions are:
        - 2021-11-05
        - 2021-07-01
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "AGRO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def DIDEO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-06-11", **kwargs
) -> Graph:
    """Retrieve and return the DIDEO graph (kgobo repository).

    Parameters
    ----------
    directed = False
        Load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Will preprocess in Linux/macOS but not Windows.
    load_nodes = True
        Load node names or use numeric range.
    auto_enable_tradeoffs = True
        Enable when graph has < 50M edges.
    cache_path = None
        Path to store graphs.
        Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-06-11"
        Version to retrieve.
        The available versions are:
        - 2021-06-11
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "DIDEO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def TXPO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2020-03-03", **kwargs
) -> Graph:
    """Retrieve and return the TXPO graph (kgobo repository).

    Parameters
    ----------
    directed = False
        Load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Will preprocess in Linux/macOS but not Windows.
    load_nodes = True
        Load node names or use numeric range.
    auto_enable_tradeoffs = True
        Enable when graph has < 50M edges.
    cache_path = None
        Path to store graphs.
        Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2020-03-03"
        Version to retrieve.
        The available versions are:
        - 2020-03-03
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "TXPO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def PATO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2022-02-20", **kwargs
) -> Graph:
    """Retrieve and return the PATO graph (kgobo repository).

    Parameters
    ----------
    directed = False
        Load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Will preprocess in Linux/macOS but not Windows.
    load_nodes = True
        Load node names or use numeric range.
    auto_enable_tradeoffs = True
        Enable when graph has < 50M edges.
    cache_path = None
        Path to store graphs.
        Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2022-02-20"
        Version to retrieve.
        The available versions are:
        - 2022-05-20
        - 2021-09-09
        - 2021-11-05
        - 2021-12-03
        - 2022-01-12
        - 2022-02-08
        - 2022-02-20
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "PATO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def HOM(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2015-01-07", **kwargs
) -> Graph:
    """Retrieve and return the HOM graph (kgobo repository).

    Parameters
    ----------
    directed = False
        Load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Will preprocess in Linux/macOS but not Windows.
    load_nodes = True
        Load node names or use numeric range.
    auto_enable_tradeoffs = True
        Enable when graph has < 50M edges.
    cache_path = None
        Path to store graphs.
        Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2015-01-07"
        Version to retrieve.
        The available versions are:
        - 2015-01-07
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "HOM", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def ECO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2022-04-11", **kwargs
) -> Graph:
    """Retrieve and return the ECO graph (kgobo repository).

    Parameters
    ----------
    directed = False
        Load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Will preprocess in Linux/macOS but not Windows.
    load_nodes = True
        Load node names or use numeric range.
    auto_enable_tradeoffs = True
        Enable when graph has < 50M edges.
    cache_path = None
        Path to store graphs.
        Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2022-04-11"
        Version to retrieve.
        The available versions are:
        - 2022-05-27
        - 2021-10-20
        - 2021-12-03
        - 2022-01-04
        - 2022-02-09
        - 2022-04-11
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "ECO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def ICEO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2.1", **kwargs
) -> Graph:
    """Retrieve and return the ICEO graph (kgobo repository).

    Parameters
    ----------
    directed = False
        Load the graph as directed.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak.
        Will preprocess in Linux/macOS but not Windows.
    load_nodes = True
        Load node names or use numeric range.
    auto_enable_tradeoffs = True
        Enable when graph has < 50M edges.
    cache_path = None
        Path to store graphs.
        Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2.1"
        Version to retrieve.
        The available versions are:
        - 2.1
    """
    # Build the retriever, then invoke it to download/load the graph.
    retriever = AutomaticallyRetrievedGraph(
        "ICEO", version, "kgobo", directed, preprocess, load_nodes,
        load_node_types, load_edge_weights, auto_enable_tradeoffs,
        sort_tmp_dir, verbose, cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def DDANAT(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2020-04-13", **kwargs
) -> Graph:
    """Retrieve and return the DDANAT graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Load the graph as directed instead of undirected.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not Windows.
    load_nodes = True
        Load node names rather than using a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2020-04-13"
        Version to retrieve; the available versions are:
        - 2020-04-13
    """
    # Build the lazy retriever first, then invoke it so that download and
    # preprocessing only happen at call time.
    retriever = AutomaticallyRetrievedGraph(
        "DDANAT", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def BSPO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-09-22", **kwargs
) -> Graph:
    """Retrieve and return the BSPO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Load the graph as directed instead of undirected.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not Windows.
    load_nodes = True
        Load node names rather than using a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-09-22"
        Version to retrieve; the available versions are:
        - 2021-10-13
        - 2021-09-22
    """
    # Build the lazy retriever first, then invoke it so that download and
    # preprocessing only happen at call time.
    retriever = AutomaticallyRetrievedGraph(
        "BSPO", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def MRO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2022-03-14", **kwargs
) -> Graph:
    """Retrieve and return the MRO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Load the graph as directed instead of undirected.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not Windows.
    load_nodes = True
        Load node names rather than using a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2022-03-14"
        Version to retrieve; the available versions are:
        - 2022-05-13
        - 2021-09-24
        - 2021-10-15
        - 2021-11-04
        - 2021-11-29
        - 2021-12-15
        - 2022-01-13
        - 2022-01-21
        - 2022-03-14
    """
    # Build the lazy retriever first, then invoke it so that download and
    # preprocessing only happen at call time.
    retriever = AutomaticallyRetrievedGraph(
        "MRO", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def PCO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-05-03", **kwargs
) -> Graph:
    """Retrieve and return the PCO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Load the graph as directed instead of undirected.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not Windows.
    load_nodes = True
        Load node names rather than using a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-05-03"
        Version to retrieve; the available versions are:
        - 2021-05-03
    """
    # Build the lazy retriever first, then invoke it so that download and
    # preprocessing only happen at call time.
    retriever = AutomaticallyRetrievedGraph(
        "PCO", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def EPSO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "", **kwargs
) -> Graph:
    """Retrieve and return the EPSO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Load the graph as directed instead of undirected.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not Windows.
    load_nodes = True
        Load node names rather than using a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = ""
        Version to retrieve; the available versions are:
        - 2021-05-28
        - "" (the empty string; this is the default)
    """
    # Build the lazy retriever first, then invoke it so that download and
    # preprocessing only happen at call time.
    retriever = AutomaticallyRetrievedGraph(
        "EPSO", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def ORNASEQ(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2019-07-08", **kwargs
) -> Graph:
    """Retrieve and return the ORNASEQ graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Load the graph as directed instead of undirected.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not Windows.
    load_nodes = True
        Load node names rather than using a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2019-07-08"
        Version to retrieve; the available versions are:
        - 2019-07-08
    """
    # Build the lazy retriever first, then invoke it so that download and
    # preprocessing only happen at call time.
    retriever = AutomaticallyRetrievedGraph(
        "ORNASEQ", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def HP(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2022-02-14", **kwargs
) -> Graph:
    """Retrieve and return the HP graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Load the graph as directed instead of undirected.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not Windows.
    load_nodes = True
        Load node names rather than using a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2022-02-14"
        Version to retrieve; the available versions are:
        - 2022-04-14
        - 2021-08-02
        - 2021-10-10
        - 2022-02-14
    """
    # Build the lazy retriever first, then invoke it so that download and
    # preprocessing only happen at call time.
    retriever = AutomaticallyRetrievedGraph(
        "HP", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def DPO(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2022-02-24", **kwargs
) -> Graph:
    """Retrieve and return the DPO graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Load the graph as directed instead of undirected.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not Windows.
    load_nodes = True
        Load node names rather than using a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2022-02-24"
        Version to retrieve; the available versions are:
        - 2022-04-13
        - 2021-09-02
        - 2021-10-15
        - 2021-12-10
        - 2022-01-24
        - 2022-02-24
    """
    # Build the lazy retriever first, then invoke it so that download and
    # preprocessing only happen at call time.
    retriever = AutomaticallyRetrievedGraph(
        "DPO", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def CL(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2022-01-05", **kwargs
) -> Graph:
    """Retrieve and return the CL graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Load the graph as directed instead of undirected.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not Windows.
    load_nodes = True
        Load node names rather than using a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2022-01-05"
        Version to retrieve; the available versions are:
        - 2022-02-16
        - 2021-09-09
        - 2021-11-25
        - 2021-12-07
        - 2021-12-16
        - 2022-01-05
    """
    # Build the lazy retriever first, then invoke it so that download and
    # preprocessing only happen at call time.
    retriever = AutomaticallyRetrievedGraph(
        "CL", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
def MFOEM(
    directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True, auto_enable_tradeoffs = True,
    sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
    cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-09-21", **kwargs
) -> Graph:
    """Retrieve and return the MFOEM graph from the kgobo repository.

    Parameters
    ----------
    directed = False
        Load the graph as directed instead of undirected.
    preprocess = "auto"
        Preprocess for optimal load time & memory peak;
        preprocessing runs on Linux/macOS but not Windows.
    load_nodes = True
        Load node names rather than using a numeric range.
    auto_enable_tradeoffs = True
        Enable when the graph has < 50M edges.
    cache_path = None
        Directory where graphs are stored; defaults to the
        `GRAPH_CACHE_DIR` sys var or `graphs`.
    cache_sys_var = "GRAPH_CACHE_DIR"
    version = "2021-09-21"
        Version to retrieve; the available versions are:
        - 2021-11-17
        - 2021-09-21
    """
    # Build the lazy retriever first, then invoke it so that download and
    # preprocessing only happen at call time.
    retriever = AutomaticallyRetrievedGraph(
        "MFOEM", version, "kgobo", directed, preprocess,
        load_nodes, load_node_types, load_edge_weights,
        auto_enable_tradeoffs, sort_tmp_dir, verbose,
        cache, cache_path, cache_sys_var, kwargs
    )
    return retriever()
| 35.222256
| 96
| 0.661585
| 30,458
| 235,179
| 4.875698
| 0.012673
| 0.031514
| 0.074846
| 0.070907
| 0.924595
| 0.917827
| 0.917827
| 0.914864
| 0.902865
| 0.878428
| 0
| 0.034882
| 0.248849
| 235,179
| 6,676
| 97
| 35.227531
| 0.805763
| 0.510343
| 0
| 0.74802
| 0
| 0
| 0.065852
| 0.000466
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090824
| false
| 0
| 0.000932
| 0
| 0.18258
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e14abc0cc68bfeea08d4d311c1fc03ce94abb036
| 7,623
|
py
|
Python
|
tests/compute/spatial/test_rotate_axis.py
|
pfackeldey/vector
|
87e9e942f2a9ae09a3e250e12f37505eb22e25ea
|
[
"BSD-3-Clause"
] | 40
|
2020-03-26T13:28:36.000Z
|
2022-03-23T22:14:35.000Z
|
tests/compute/spatial/test_rotate_axis.py
|
pfackeldey/vector
|
87e9e942f2a9ae09a3e250e12f37505eb22e25ea
|
[
"BSD-3-Clause"
] | 66
|
2020-02-14T13:32:18.000Z
|
2022-03-29T14:35:31.000Z
|
tests/compute/spatial/test_rotate_axis.py
|
pfackeldey/vector
|
87e9e942f2a9ae09a3e250e12f37505eb22e25ea
|
[
"BSD-3-Clause"
] | 14
|
2020-01-29T22:03:33.000Z
|
2022-03-16T02:46:25.000Z
|
# Copyright (c) 2019-2021, Jonas Eschle, Jim Pivarski, Eduardo Rodrigues, and Henry Schreiner.
#
# Distributed under the 3-clause BSD license, see accompanying file LICENSE
# or https://github.com/scikit-hep/vector for details.
import numpy
import pytest
import vector._backends.numpy_
import vector._backends.object_
def test_spatial_object():
    """rotate_axis on object-backed 3D vectors must give the same XY+Z result
    regardless of which coordinate system either operand is converted to."""
    obj = vector._backends.object_
    expected = (0.37483425404335763, 0.5383405688588193, 0.5828282027463345)
    axis = obj.VectorObject3D(
        obj.AzimuthalObjectXY(0.1, 0.2),
        obj.LongitudinalObjectZ(0.3),
    )
    vec = obj.VectorObject3D(
        obj.AzimuthalObjectXY(0.4, 0.5),
        obj.LongitudinalObjectZ(0.6),
    )

    def check(result):
        # The rotation always comes back in Cartesian (XY, Z) representation.
        assert isinstance(result, obj.VectorObject3D)
        assert isinstance(result.azimuthal, obj.AzimuthalObjectXY)
        assert isinstance(result.longitudinal, obj.LongitudinalObjectZ)
        assert result.x == pytest.approx(expected[0])
        assert result.y == pytest.approx(expected[1])
        assert result.z == pytest.approx(expected[2])

    check(vec.rotate_axis(axis, 0.25))
    systems = ("xyz", "xytheta", "xyeta", "rhophiz", "rhophitheta", "rhophieta")
    for axis_system in systems:
        for vec_system in systems:
            converted_axis = getattr(axis, "to_" + axis_system)()
            converted_vec = getattr(vec, "to_" + vec_system)()
            check(converted_vec.rotate_axis(converted_axis, 0.25))
def test_spatial_numpy():
    """rotate_axis on NumPy-backed 3D vectors must give the same (x, y, z)
    record regardless of which coordinate system either operand uses."""
    expected = (0.37483425404335763, 0.5383405688588193, 0.5828282027463345)
    xyz_dtype = [("x", numpy.float64), ("y", numpy.float64), ("z", numpy.float64)]
    axis = vector._backends.numpy_.VectorNumpy3D([(0.1, 0.2, 0.3)], dtype=xyz_dtype)
    vec = vector._backends.numpy_.VectorNumpy3D([(0.4, 0.5, 0.6)], dtype=xyz_dtype)

    def check(result):
        # The rotation always comes back as a Cartesian structured array.
        assert isinstance(result, vector._backends.numpy_.VectorNumpy3D)
        assert result.dtype.names == ("x", "y", "z")
        assert result[0].x == pytest.approx(expected[0])
        assert result[0].y == pytest.approx(expected[1])
        assert result[0].z == pytest.approx(expected[2])

    check(vec.rotate_axis(axis, 0.25))
    systems = ("xyz", "xytheta", "xyeta", "rhophiz", "rhophitheta", "rhophieta")
    for axis_system in systems:
        for vec_system in systems:
            converted_axis = getattr(axis, "to_" + axis_system)()
            converted_vec = getattr(vec, "to_" + vec_system)()
            check(converted_vec.rotate_axis(converted_axis, 0.25))
def test_lorentz_object():
    """rotate_axis on object-backed 4D vectors must preserve the spatial
    result for every combination of 4D coordinate systems (t and tau)."""
    obj = vector._backends.object_
    expected = (0.37483425404335763, 0.5383405688588193, 0.5828282027463345)
    axis = obj.VectorObject4D(
        obj.AzimuthalObjectXY(0.1, 0.2),
        obj.LongitudinalObjectZ(0.3),
        obj.TemporalObjectT(99),
    )
    vec = obj.VectorObject4D(
        obj.AzimuthalObjectXY(0.4, 0.5),
        obj.LongitudinalObjectZ(0.6),
        obj.TemporalObjectT(99),
    )

    def check(result):
        # The rotation keeps the 4D type, Cartesian azimuthal/longitudinal
        # components, and some temporal component (t or tau).
        assert isinstance(result, obj.VectorObject4D)
        assert isinstance(result.azimuthal, obj.AzimuthalObjectXY)
        assert isinstance(result.longitudinal, obj.LongitudinalObjectZ)
        assert hasattr(result, "temporal")
        assert result.x == pytest.approx(expected[0])
        assert result.y == pytest.approx(expected[1])
        assert result.z == pytest.approx(expected[2])

    check(vec.rotate_axis(axis, 0.25))
    systems = (
        "xyzt",
        "xythetat",
        "xyetat",
        "rhophizt",
        "rhophithetat",
        "rhophietat",
        "xyztau",
        "xythetatau",
        "xyetatau",
        "rhophiztau",
        "rhophithetatau",
        "rhophietatau",
    )
    for axis_system in systems:
        for vec_system in systems:
            converted_axis = getattr(axis, "to_" + axis_system)()
            converted_vec = getattr(vec, "to_" + vec_system)()
            check(converted_vec.rotate_axis(converted_axis, 0.25))
def test_lorentz_numpy():
    """rotate_axis on NumPy-backed 4D vectors must preserve the spatial
    result for every combination of 4D coordinate systems (t and tau)."""
    expected = (0.37483425404335763, 0.5383405688588193, 0.5828282027463345)
    xyzt_dtype = [
        ("x", numpy.float64),
        ("y", numpy.float64),
        ("z", numpy.float64),
        ("t", numpy.float64),
    ]
    axis = vector._backends.numpy_.VectorNumpy4D([(0.1, 0.2, 0.3, 99)], dtype=xyzt_dtype)
    vec = vector._backends.numpy_.VectorNumpy4D([(0.4, 0.5, 0.6, 99)], dtype=xyzt_dtype)

    def check(result, allowed_names):
        # Depending on the input temporal coordinate, the rotated record may
        # carry either "t" or "tau" as its fourth field.
        assert isinstance(result, vector._backends.numpy_.VectorNumpy4D)
        assert result.dtype.names in allowed_names
        assert result[0].x == pytest.approx(expected[0])
        assert result[0].y == pytest.approx(expected[1])
        assert result[0].z == pytest.approx(expected[2])

    check(vec.rotate_axis(axis, 0.25), {("x", "y", "z", "t")})
    systems = (
        "xyzt",
        "xythetat",
        "xyetat",
        "rhophizt",
        "rhophithetat",
        "rhophietat",
        "xyztau",
        "xythetatau",
        "xyetatau",
        "rhophiztau",
        "rhophithetatau",
        "rhophietatau",
    )
    for axis_system in systems:
        for vec_system in systems:
            converted_axis = getattr(axis, "to_" + axis_system)()
            converted_vec = getattr(vec, "to_" + vec_system)()
            check(
                converted_vec.rotate_axis(converted_axis, 0.25),
                {("x", "y", "z", "t"), ("x", "y", "z", "tau")},
            )
| 36.649038
| 94
| 0.577988
| 774
| 7,623
| 5.568475
| 0.133075
| 0.116937
| 0.12529
| 0.068677
| 0.932715
| 0.885847
| 0.87007
| 0.869374
| 0.864501
| 0.832947
| 0
| 0.105504
| 0.282566
| 7,623
| 207
| 95
| 36.826087
| 0.682575
| 0.028729
| 0
| 0.793651
| 0
| 0
| 0.091093
| 0
| 0
| 0
| 0
| 0
| 0.243386
| 1
| 0.021164
| false
| 0
| 0.021164
| 0
| 0.042328
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e165021c216c89ce52a4c396f61b648c2acb4a6c
| 16,695
|
py
|
Python
|
release/stubs.min/Wms/RemotingImplementation/DataSetTableAdapters.py
|
tranconbv/ironpython-stubs
|
a601759e6c6819beff8e6b639d18a24b7e351851
|
[
"MIT"
] | null | null | null |
release/stubs.min/Wms/RemotingImplementation/DataSetTableAdapters.py
|
tranconbv/ironpython-stubs
|
a601759e6c6819beff8e6b639d18a24b7e351851
|
[
"MIT"
] | null | null | null |
release/stubs.min/Wms/RemotingImplementation/DataSetTableAdapters.py
|
tranconbv/ironpython-stubs
|
a601759e6c6819beff8e6b639d18a24b7e351851
|
[
"MIT"
] | null | null | null |
# encoding: utf-8
# module Wms.RemotingImplementation.DataSetTableAdapters calls itself DataSetTableAdapters
# from Wms.RemotingImplementation,Version=1.23.1.0,Culture=neutral,PublicKeyToken=null
# by generator 1.145
# no doc
# no important
from __init__ import *
# no functions
# classes
class PurchaseOrders_GetHistoryLinesTableAdapter(Component):
    # Auto-generated IronPython stub for the .NET typed TableAdapter behind the
    # PurchaseOrders_GetHistoryLines query. All method bodies are `pass`
    # placeholders; the docstrings carry the original .NET signatures for IDE
    # completion. NOTE(review): `instance=ZZZ()` calls ZZZ without `self` —
    # this stub is for completion only and presumably is never executed.
    """ PurchaseOrders_GetHistoryLinesTableAdapter() """
    def ZZZ(self):
        """hardcoded/mock instance of the class"""
        return PurchaseOrders_GetHistoryLinesTableAdapter()
    instance=ZZZ()
    """hardcoded/returns an instance of the class"""
    def Dispose(self):
        """
        Dispose(self: Component,disposing: bool)
        Releases the unmanaged resources used by the System.ComponentModel.Component and optionally releases the managed resources.
        disposing: true to release both managed and unmanaged resources; false to release only unmanaged resources.
        """
        pass
    # Fill/GetData mirror the typed-adapter pair: Fill populates a caller-owned
    # data table, GetData allocates and returns a new one.
    def Fill(self,dataTable,GroupGuid,PageStart,PageLimit):
        """ Fill(self: PurchaseOrders_GetHistoryLinesTableAdapter,dataTable: PurchaseOrders_GetHistoryLinesDataTable,GroupGuid: Nullable[Guid],PageStart: Nullable[int],PageLimit: Nullable[int]) -> int """
        pass
    def GetData(self,GroupGuid,PageStart,PageLimit):
        """ GetData(self: PurchaseOrders_GetHistoryLinesTableAdapter,GroupGuid: Nullable[Guid],PageStart: Nullable[int],PageLimit: Nullable[int]) -> PurchaseOrders_GetHistoryLinesDataTable """
        pass
    def GetService(self,*args):
        """
        GetService(self: Component,service: Type) -> object
        Returns an object that represents a service provided by the System.ComponentModel.Component or by its System.ComponentModel.Container.
        service: A service provided by the System.ComponentModel.Component.
        Returns: An System.Object that represents a service provided by the System.ComponentModel.Component,or null if the System.ComponentModel.Component does not provide the specified
        service.
        """
        pass
    def MemberwiseClone(self,*args):
        """
        MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject
        Creates a shallow copy of the current System.MarshalByRefObject object.
        cloneIdentity: false to delete the current System.MarshalByRefObject object's identity,which will cause the object to be assigned a new identity when it is marshaled across a remoting
        boundary. A value of false is usually appropriate. true to copy the current System.MarshalByRefObject object's identity to its clone,which will cause remoting client calls
        to be routed to the remote server object.
        Returns: A shallow copy of the current System.MarshalByRefObject object.
        MemberwiseClone(self: object) -> object
        Creates a shallow copy of the current System.Object.
        Returns: A shallow copy of the current System.Object.
        """
        pass
    # Context-manager protocol stubs (IDisposable maps onto with-statements).
    def __enter__(self,*args):
        """ __enter__(self: IDisposable) -> object """
        pass
    def __exit__(self,*args):
        """ __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
        pass
    def __init__(self,*args):
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __str__(self,*args):
        pass
    # Property stubs: getter/setter/deleter are placeholder lambdas; the string
    # literals that follow some of them are the generated property docs.
    Adapter=property(lambda self: object(),lambda self,v: None,lambda self: None)
    CanRaiseEvents=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets a value indicating whether the component can raise an event.
    """
    ClearBeforeFill=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Get: ClearBeforeFill(self: PurchaseOrders_GetHistoryLinesTableAdapter) -> bool
    Set: ClearBeforeFill(self: PurchaseOrders_GetHistoryLinesTableAdapter)=value
    """
    CommandCollection=property(lambda self: object(),lambda self,v: None,lambda self: None)
    DesignMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets a value that indicates whether the System.ComponentModel.Component is currently in design mode.
    """
    Events=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets the list of event handlers that are attached to this System.ComponentModel.Component.
    """
class RmaOrders_GetHistoryLinesTableAdapter(Component):
    # Auto-generated IronPython stub for the .NET typed TableAdapter behind the
    # RmaOrders_GetHistoryLines query. All method bodies are `pass`
    # placeholders; the docstrings carry the original .NET signatures for IDE
    # completion. NOTE(review): `instance=ZZZ()` calls ZZZ without `self` —
    # this stub is for completion only and presumably is never executed.
    """ RmaOrders_GetHistoryLinesTableAdapter() """
    def ZZZ(self):
        """hardcoded/mock instance of the class"""
        return RmaOrders_GetHistoryLinesTableAdapter()
    instance=ZZZ()
    """hardcoded/returns an instance of the class"""
    def Dispose(self):
        """
        Dispose(self: Component,disposing: bool)
        Releases the unmanaged resources used by the System.ComponentModel.Component and optionally releases the managed resources.
        disposing: true to release both managed and unmanaged resources; false to release only unmanaged resources.
        """
        pass
    # Fill/GetData mirror the typed-adapter pair: Fill populates a caller-owned
    # data table, GetData allocates and returns a new one.
    def Fill(self,dataTable,GroupGuid,PageStart,PageLimit):
        """ Fill(self: RmaOrders_GetHistoryLinesTableAdapter,dataTable: RmaOrders_GetHistoryLinesDataTable,GroupGuid: Nullable[Guid],PageStart: Nullable[int],PageLimit: Nullable[int]) -> int """
        pass
    def GetData(self,GroupGuid,PageStart,PageLimit):
        """ GetData(self: RmaOrders_GetHistoryLinesTableAdapter,GroupGuid: Nullable[Guid],PageStart: Nullable[int],PageLimit: Nullable[int]) -> RmaOrders_GetHistoryLinesDataTable """
        pass
    def GetService(self,*args):
        """
        GetService(self: Component,service: Type) -> object
        Returns an object that represents a service provided by the System.ComponentModel.Component or by its System.ComponentModel.Container.
        service: A service provided by the System.ComponentModel.Component.
        Returns: An System.Object that represents a service provided by the System.ComponentModel.Component,or null if the System.ComponentModel.Component does not provide the specified
        service.
        """
        pass
    def MemberwiseClone(self,*args):
        """
        MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject
        Creates a shallow copy of the current System.MarshalByRefObject object.
        cloneIdentity: false to delete the current System.MarshalByRefObject object's identity,which will cause the object to be assigned a new identity when it is marshaled across a remoting
        boundary. A value of false is usually appropriate. true to copy the current System.MarshalByRefObject object's identity to its clone,which will cause remoting client calls
        to be routed to the remote server object.
        Returns: A shallow copy of the current System.MarshalByRefObject object.
        MemberwiseClone(self: object) -> object
        Creates a shallow copy of the current System.Object.
        Returns: A shallow copy of the current System.Object.
        """
        pass
    # Context-manager protocol stubs (IDisposable maps onto with-statements).
    def __enter__(self,*args):
        """ __enter__(self: IDisposable) -> object """
        pass
    def __exit__(self,*args):
        """ __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
        pass
    def __init__(self,*args):
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __str__(self,*args):
        pass
    # Property stubs: getter/setter/deleter are placeholder lambdas; the string
    # literals that follow some of them are the generated property docs.
    Adapter=property(lambda self: object(),lambda self,v: None,lambda self: None)
    CanRaiseEvents=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets a value indicating whether the component can raise an event.
    """
    ClearBeforeFill=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Get: ClearBeforeFill(self: RmaOrders_GetHistoryLinesTableAdapter) -> bool
    Set: ClearBeforeFill(self: RmaOrders_GetHistoryLinesTableAdapter)=value
    """
    CommandCollection=property(lambda self: object(),lambda self,v: None,lambda self: None)
    DesignMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets a value that indicates whether the System.ComponentModel.Component is currently in design mode.
    """
    Events=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets the list of event handlers that are attached to this System.ComponentModel.Component.
    """
class Shipment_GetHistoryShipmentLinesTableAdapter(Component):
    # Auto-generated IronPython stub for the .NET typed TableAdapter behind the
    # Shipment_GetHistoryShipmentLines query. All method bodies are `pass`
    # placeholders; the docstrings carry the original .NET signatures for IDE
    # completion. NOTE(review): `instance=ZZZ()` calls ZZZ without `self` —
    # this stub is for completion only and presumably is never executed.
    """ Shipment_GetHistoryShipmentLinesTableAdapter() """
    def ZZZ(self):
        """hardcoded/mock instance of the class"""
        return Shipment_GetHistoryShipmentLinesTableAdapter()
    instance=ZZZ()
    """hardcoded/returns an instance of the class"""
    def Dispose(self):
        """
        Dispose(self: Component,disposing: bool)
        Releases the unmanaged resources used by the System.ComponentModel.Component and optionally releases the managed resources.
        disposing: true to release both managed and unmanaged resources; false to release only unmanaged resources.
        """
        pass
    # Fill/GetData mirror the typed-adapter pair: Fill populates a caller-owned
    # data table, GetData allocates and returns a new one. This adapter takes a
    # single nullable Id rather than the GroupGuid/paging triple.
    def Fill(self,dataTable,Id):
        """ Fill(self: Shipment_GetHistoryShipmentLinesTableAdapter,dataTable: Shipment_GetHistoryShipmentLinesDataTable,Id: Nullable[int]) -> int """
        pass
    def GetData(self,Id):
        """ GetData(self: Shipment_GetHistoryShipmentLinesTableAdapter,Id: Nullable[int]) -> Shipment_GetHistoryShipmentLinesDataTable """
        pass
    def GetService(self,*args):
        """
        GetService(self: Component,service: Type) -> object
        Returns an object that represents a service provided by the System.ComponentModel.Component or by its System.ComponentModel.Container.
        service: A service provided by the System.ComponentModel.Component.
        Returns: An System.Object that represents a service provided by the System.ComponentModel.Component,or null if the System.ComponentModel.Component does not provide the specified
        service.
        """
        pass
    def MemberwiseClone(self,*args):
        """
        MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject
        Creates a shallow copy of the current System.MarshalByRefObject object.
        cloneIdentity: false to delete the current System.MarshalByRefObject object's identity,which will cause the object to be assigned a new identity when it is marshaled across a remoting
        boundary. A value of false is usually appropriate. true to copy the current System.MarshalByRefObject object's identity to its clone,which will cause remoting client calls
        to be routed to the remote server object.
        Returns: A shallow copy of the current System.MarshalByRefObject object.
        MemberwiseClone(self: object) -> object
        Creates a shallow copy of the current System.Object.
        Returns: A shallow copy of the current System.Object.
        """
        pass
    # Context-manager protocol stubs (IDisposable maps onto with-statements).
    def __enter__(self,*args):
        """ __enter__(self: IDisposable) -> object """
        pass
    def __exit__(self,*args):
        """ __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
        pass
    def __init__(self,*args):
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __str__(self,*args):
        pass
    # Property stubs: getter/setter/deleter are placeholder lambdas; the string
    # literals that follow some of them are the generated property docs.
    Adapter=property(lambda self: object(),lambda self,v: None,lambda self: None)
    CanRaiseEvents=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets a value indicating whether the component can raise an event.
    """
    ClearBeforeFill=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Get: ClearBeforeFill(self: Shipment_GetHistoryShipmentLinesTableAdapter) -> bool
    Set: ClearBeforeFill(self: Shipment_GetHistoryShipmentLinesTableAdapter)=value
    """
    CommandCollection=property(lambda self: object(),lambda self,v: None,lambda self: None)
    DesignMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets a value that indicates whether the System.ComponentModel.Component is currently in design mode.
    """
    Events=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets the list of event handlers that are attached to this System.ComponentModel.Component.
    """
class TableAdapterManager(Component):
 """ TableAdapterManager() """
 # NOTE(review): auto-generated IronPython/.NET stub -- every method body is
 # `pass` and every property returns a dummy object; this class documents the
 # CLR TableAdapterManager surface, it does not implement it.
 def ZZZ(self):
  """hardcoded/mock instance of the class"""
  return TableAdapterManager()
 instance=ZZZ()
 """hardcoded/returns an instance of the class"""
 def Dispose(self):
  """
  Dispose(self: Component,disposing: bool)
  Releases the unmanaged resources used by the System.ComponentModel.Component and optionally releases the managed resources.
  disposing: true to release both managed and unmanaged resources; false to release only unmanaged resources.
  """
  pass
 def GetService(self,*args):
  """
  GetService(self: Component,service: Type) -> object
  Returns an object that represents a service provided by the System.ComponentModel.Component or by its System.ComponentModel.Container.
  service: A service provided by the System.ComponentModel.Component.
  Returns: An System.Object that represents a service provided by the System.ComponentModel.Component,or null if the System.ComponentModel.Component does not provide the specified
  service.
  """
  pass
 def MatchTableAdapterConnection(self,*args):
  """ MatchTableAdapterConnection(self: TableAdapterManager,inputConnection: IDbConnection) -> bool """
  pass
 def MemberwiseClone(self,*args):
  """
  MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject
  Creates a shallow copy of the current System.MarshalByRefObject object.
  cloneIdentity: false to delete the current System.MarshalByRefObject object's identity,which will cause the object to be assigned a new identity when it is marshaled across a remoting
  boundary. A value of false is usually appropriate. true to copy the current System.MarshalByRefObject object's identity to its clone,which will cause remoting client calls
  to be routed to the remote server object.
  Returns: A shallow copy of the current System.MarshalByRefObject object.
  MemberwiseClone(self: object) -> object
  Creates a shallow copy of the current System.Object.
  Returns: A shallow copy of the current System.Object.
  """
  pass
 def SortSelfReferenceRows(self,*args):
  """ SortSelfReferenceRows(self: TableAdapterManager,rows: Array[DataRow],relation: DataRelation,childFirst: bool) """
  pass
 def UpdateAll(self,dataSet):
  """ UpdateAll(self: TableAdapterManager,dataSet: DataSet) -> int """
  pass
 def __enter__(self,*args):
  """ __enter__(self: IDisposable) -> object """
  pass
 def __exit__(self,*args):
  """ __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
  pass
 def __init__(self,*args):
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
 def __str__(self,*args):
  pass
 BackupDataSetBeforeUpdate=property(lambda self: object(),lambda self,v: None,lambda self: None)
 """Get: BackupDataSetBeforeUpdate(self: TableAdapterManager) -> bool
Set: BackupDataSetBeforeUpdate(self: TableAdapterManager)=value
"""
 CanRaiseEvents=property(lambda self: object(),lambda self,v: None,lambda self: None)
 """Gets a value indicating whether the component can raise an event.
"""
 Connection=property(lambda self: object(),lambda self,v: None,lambda self: None)
 """Get: Connection(self: TableAdapterManager) -> IDbConnection
Set: Connection(self: TableAdapterManager)=value
"""
 DesignMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
 """Gets a value that indicates whether the System.ComponentModel.Component is currently in design mode.
"""
 Events=property(lambda self: object(),lambda self,v: None,lambda self: None)
 """Gets the list of event handlers that are attached to this System.ComponentModel.Component.
"""
 TableAdapterInstanceCount=property(lambda self: object(),lambda self,v: None,lambda self: None)
 """Get: TableAdapterInstanceCount(self: TableAdapterManager) -> int
"""
 UpdateOrder=property(lambda self: object(),lambda self,v: None,lambda self: None)
 """Get: UpdateOrder(self: TableAdapterManager) -> UpdateOrderOption
Set: UpdateOrder(self: TableAdapterManager)=value
"""
 # Presumably a placeholder for the nested UpdateOrderOption enum from the
 # CLR assembly -- the stub generator sets nested types to None. TODO confirm.
 UpdateOrderOption=None
| 44.166667
| 215
| 0.743396
| 1,984
| 16,695
| 6.112903
| 0.092238
| 0.06184
| 0.066953
| 0.049472
| 0.846471
| 0.837731
| 0.837731
| 0.835092
| 0.835092
| 0.835092
| 0
| 0.000715
| 0.162324
| 16,695
| 377
| 216
| 44.28382
| 0.8665
| 0.554178
| 0
| 0.837607
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.350427
| false
| 0.316239
| 0.008547
| 0
| 0.683761
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
bec9ec8fcdb3977b237b54f8786c70eed543b14d
| 219
|
py
|
Python
|
src/grok/tests/grokker/priority_fixture.py
|
zopefoundation/grok
|
9c29591071aff824e6ef72acf8137bb21c0ea861
|
[
"ZPL-2.1"
] | 21
|
2015-11-06T08:54:43.000Z
|
2021-08-15T13:41:13.000Z
|
src/grok/tests/grokker/priority_fixture.py
|
zopefoundation/grok
|
9c29591071aff824e6ef72acf8137bb21c0ea861
|
[
"ZPL-2.1"
] | 8
|
2016-03-18T14:25:17.000Z
|
2020-06-10T23:17:26.000Z
|
src/grok/tests/grokker/priority_fixture.py
|
zopefoundation/grok
|
9c29591071aff824e6ef72acf8137bb21c0ea861
|
[
"ZPL-2.1"
] | 13
|
2015-09-19T22:42:44.000Z
|
2022-01-24T18:55:20.000Z
|
import grok.tests.grokker.priority
class AlphaSub(grok.tests.grokker.priority.Alpha):
    """Bare subclass of priority.Alpha; presumably a grokker-priority test fixture."""
    pass
class BetaSub(grok.tests.grokker.priority.Beta):
    """Bare subclass of priority.Beta; presumably a grokker-priority test fixture."""
    pass
class GammaSub(grok.tests.grokker.priority.Gamma):
    """Bare subclass of priority.Gamma; presumably a grokker-priority test fixture."""
    pass
| 15.642857
| 50
| 0.757991
| 29
| 219
| 5.724138
| 0.448276
| 0.216867
| 0.385542
| 0.578313
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.13242
| 219
| 13
| 51
| 16.846154
| 0.873684
| 0
| 0
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.428571
| 0.142857
| 0
| 0.571429
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
bee0976da83c2db761532a0d629e0a52b5746e9e
| 64,006
|
py
|
Python
|
ionoscloud/api/backup_units_api.py
|
ionos-cloud/ionos-cloud-sdk-python
|
3c5804697c262898e6f6a438dc40e1b45a4bb5c9
|
[
"Apache-2.0"
] | null | null | null |
ionoscloud/api/backup_units_api.py
|
ionos-cloud/ionos-cloud-sdk-python
|
3c5804697c262898e6f6a438dc40e1b45a4bb5c9
|
[
"Apache-2.0"
] | null | null | null |
ionoscloud/api/backup_units_api.py
|
ionos-cloud/ionos-cloud-sdk-python
|
3c5804697c262898e6f6a438dc40e1b45a4bb5c9
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import absolute_import
import re # noqa: F401
import six
from ionoscloud.api_client import ApiClient
from ionoscloud.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class BackupUnitsApi(object):
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def backupunits_delete(self, backupunit_id, **kwargs): # noqa: E501
"""Delete backup units # noqa: E501
Remove the specified backup unit. This process will delete: 1) The backup plans inside the backup unit 2) All backups, associated with this backup unit 3) The backup user 4) The backup unit itself # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.backupunits_delete(backupunit_id, async_req=True)
>>> result = thread.get()
:param backupunit_id: The unique ID of the backup unit. (required)
:type backupunit_id: str
:param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
:type pretty: bool
:param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
:type depth: int
:param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
:type x_contract_number: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: None
"""
kwargs['_return_http_data_only'] = True
return self.backupunits_delete_with_http_info(backupunit_id, **kwargs) # noqa: E501
def backupunits_delete_with_http_info(self, backupunit_id, **kwargs): # noqa: E501
"""Delete backup units # noqa: E501
Remove the specified backup unit. This process will delete: 1) The backup plans inside the backup unit 2) All backups, associated with this backup unit 3) The backup user 4) The backup unit itself # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.backupunits_delete_with_http_info(backupunit_id, async_req=True)
>>> result = thread.get()
:param backupunit_id: The unique ID of the backup unit. (required)
:type backupunit_id: str
:param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
:type pretty: bool
:param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
:type depth: int
:param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
:type x_contract_number: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for an a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: None
"""
local_var_params = locals()
all_params = [
'backupunit_id',
'pretty',
'depth',
'x_contract_number'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth',
'response_type',
'query_params'
]
)
for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
if local_var_params_key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method backupunits_delete" % local_var_params_key
)
local_var_params[local_var_params_key] = local_var_params_val
del local_var_params['kwargs']
# verify the required parameter 'backupunit_id' is set
if self.api_client.client_side_validation and ('backupunit_id' not in local_var_params or # noqa: E501
local_var_params['backupunit_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `backupunit_id` when calling `backupunits_delete`") # noqa: E501
if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] > 10: # noqa: E501
raise ApiValueError("Invalid value for parameter `depth` when calling `backupunits_delete`, must be a value less than or equal to `10`") # noqa: E501
if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] < 0: # noqa: E501
raise ApiValueError("Invalid value for parameter `depth` when calling `backupunits_delete`, must be a value greater than or equal to `0`") # noqa: E501
collection_formats = {}
path_params = {}
if 'backupunit_id' in local_var_params:
path_params['backupunitId'] = local_var_params['backupunit_id'] # noqa: E501
query_params = list(local_var_params.get('query_params', {}).items())
if 'pretty' in local_var_params and local_var_params['pretty'] is not None: # noqa: E501
query_params.append(('pretty', local_var_params['pretty'])) # noqa: E501
if 'depth' in local_var_params and local_var_params['depth'] is not None: # noqa: E501
query_params.append(('depth', local_var_params['depth'])) # noqa: E501
header_params = {}
if 'x_contract_number' in local_var_params:
header_params['X-Contract-Number'] = local_var_params['x_contract_number'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Basic Authentication', 'Token Authentication'] # noqa: E501
response_type = None
if 'response_type' in kwargs:
response_type = kwargs['response_type']
return self.api_client.call_api(
'/backupunits/{backupunitId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=response_type, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
def backupunits_find_by_id(self, backupunit_id, **kwargs): # noqa: E501
"""Retrieve backup units # noqa: E501
Retrieve the properties of the specified backup unit. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.backupunits_find_by_id(backupunit_id, async_req=True)
>>> result = thread.get()
:param backupunit_id: The unique ID of the backup unit. (required)
:type backupunit_id: str
:param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
:type pretty: bool
:param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
:type depth: int
:param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
:type x_contract_number: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: BackupUnit
"""
kwargs['_return_http_data_only'] = True
return self.backupunits_find_by_id_with_http_info(backupunit_id, **kwargs) # noqa: E501
def backupunits_find_by_id_with_http_info(self, backupunit_id, **kwargs): # noqa: E501
"""Retrieve backup units # noqa: E501
Retrieve the properties of the specified backup unit. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.backupunits_find_by_id_with_http_info(backupunit_id, async_req=True)
>>> result = thread.get()
:param backupunit_id: The unique ID of the backup unit. (required)
:type backupunit_id: str
:param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
:type pretty: bool
:param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
:type depth: int
:param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
:type x_contract_number: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for an a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(BackupUnit, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'backupunit_id',
'pretty',
'depth',
'x_contract_number'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth',
'response_type',
'query_params'
]
)
for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
if local_var_params_key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method backupunits_find_by_id" % local_var_params_key
)
local_var_params[local_var_params_key] = local_var_params_val
del local_var_params['kwargs']
# verify the required parameter 'backupunit_id' is set
if self.api_client.client_side_validation and ('backupunit_id' not in local_var_params or # noqa: E501
local_var_params['backupunit_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `backupunit_id` when calling `backupunits_find_by_id`") # noqa: E501
if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] > 10: # noqa: E501
raise ApiValueError("Invalid value for parameter `depth` when calling `backupunits_find_by_id`, must be a value less than or equal to `10`") # noqa: E501
if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] < 0: # noqa: E501
raise ApiValueError("Invalid value for parameter `depth` when calling `backupunits_find_by_id`, must be a value greater than or equal to `0`") # noqa: E501
collection_formats = {}
path_params = {}
if 'backupunit_id' in local_var_params:
path_params['backupunitId'] = local_var_params['backupunit_id'] # noqa: E501
query_params = list(local_var_params.get('query_params', {}).items())
if 'pretty' in local_var_params and local_var_params['pretty'] is not None: # noqa: E501
query_params.append(('pretty', local_var_params['pretty'])) # noqa: E501
if 'depth' in local_var_params and local_var_params['depth'] is not None: # noqa: E501
query_params.append(('depth', local_var_params['depth'])) # noqa: E501
header_params = {}
if 'x_contract_number' in local_var_params:
header_params['X-Contract-Number'] = local_var_params['x_contract_number'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Basic Authentication', 'Token Authentication'] # noqa: E501
response_type = 'BackupUnit'
if 'response_type' in kwargs:
response_type = kwargs['response_type']
return self.api_client.call_api(
'/backupunits/{backupunitId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=response_type, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
def backupunits_get(self, **kwargs): # noqa: E501
"""List backup units # noqa: E501
List all available backup units. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.backupunits_get(async_req=True)
>>> result = thread.get()
:param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
:type pretty: bool
:param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
:type depth: int
:param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
:type x_contract_number: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: BackupUnits
"""
kwargs['_return_http_data_only'] = True
return self.backupunits_get_with_http_info(**kwargs) # noqa: E501
def backupunits_get_with_http_info(self, **kwargs): # noqa: E501
"""List backup units # noqa: E501
List all available backup units. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.backupunits_get_with_http_info(async_req=True)
>>> result = thread.get()
:param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
:type pretty: bool
:param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
:type depth: int
:param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
:type x_contract_number: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for an a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(BackupUnits, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'pretty',
'depth',
'x_contract_number'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth',
'response_type',
'query_params'
]
)
for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
if local_var_params_key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method backupunits_get" % local_var_params_key
)
local_var_params[local_var_params_key] = local_var_params_val
del local_var_params['kwargs']
if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] > 10: # noqa: E501
raise ApiValueError("Invalid value for parameter `depth` when calling `backupunits_get`, must be a value less than or equal to `10`") # noqa: E501
if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] < 0: # noqa: E501
raise ApiValueError("Invalid value for parameter `depth` when calling `backupunits_get`, must be a value greater than or equal to `0`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = list(local_var_params.get('query_params', {}).items())
if 'pretty' in local_var_params and local_var_params['pretty'] is not None: # noqa: E501
query_params.append(('pretty', local_var_params['pretty'])) # noqa: E501
if 'depth' in local_var_params and local_var_params['depth'] is not None: # noqa: E501
query_params.append(('depth', local_var_params['depth'])) # noqa: E501
header_params = {}
if 'x_contract_number' in local_var_params:
header_params['X-Contract-Number'] = local_var_params['x_contract_number'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Basic Authentication', 'Token Authentication'] # noqa: E501
response_type = 'BackupUnits'
if 'response_type' in kwargs:
response_type = kwargs['response_type']
return self.api_client.call_api(
'/backupunits', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=response_type, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
def backupunits_patch(self, backupunit_id, backup_unit, **kwargs): # noqa: E501
"""Partially modify backup units # noqa: E501
Update the properties of the specified backup unit. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.backupunits_patch(backupunit_id, backup_unit, async_req=True)
>>> result = thread.get()
:param backupunit_id: The unique ID of the backup unit. (required)
:type backupunit_id: str
:param backup_unit: The properties of the backup unit to be updated. (required)
:type backup_unit: BackupUnitProperties
:param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
:type pretty: bool
:param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
:type depth: int
:param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
:type x_contract_number: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: BackupUnit
"""
kwargs['_return_http_data_only'] = True
return self.backupunits_patch_with_http_info(backupunit_id, backup_unit, **kwargs) # noqa: E501
def backupunits_patch_with_http_info(self, backupunit_id, backup_unit, **kwargs): # noqa: E501
"""Partially modify backup units # noqa: E501
Update the properties of the specified backup unit. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.backupunits_patch_with_http_info(backupunit_id, backup_unit, async_req=True)
>>> result = thread.get()
:param backupunit_id: The unique ID of the backup unit. (required)
:type backupunit_id: str
:param backup_unit: The properties of the backup unit to be updated. (required)
:type backup_unit: BackupUnitProperties
:param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
:type pretty: bool
:param depth: Controls the detail depth of the response objects. GET /datacenters/[ID] - depth=0: Only direct properties are included; children (servers and other elements) are not included. - depth=1: Direct properties and children references are included. - depth=2: Direct properties and children properties are included. - depth=3: Direct properties and children properties and children's children are included. - depth=... and so on
:type depth: int
:param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
:type x_contract_number: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for an a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(BackupUnit, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'backupunit_id',
'backup_unit',
'pretty',
'depth',
'x_contract_number'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth',
'response_type',
'query_params'
]
)
for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
if local_var_params_key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method backupunits_patch" % local_var_params_key
)
local_var_params[local_var_params_key] = local_var_params_val
del local_var_params['kwargs']
# verify the required parameter 'backupunit_id' is set
if self.api_client.client_side_validation and ('backupunit_id' not in local_var_params or # noqa: E501
local_var_params['backupunit_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `backupunit_id` when calling `backupunits_patch`") # noqa: E501
# verify the required parameter 'backup_unit' is set
if self.api_client.client_side_validation and ('backup_unit' not in local_var_params or # noqa: E501
local_var_params['backup_unit'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `backup_unit` when calling `backupunits_patch`") # noqa: E501
if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] > 10: # noqa: E501
raise ApiValueError("Invalid value for parameter `depth` when calling `backupunits_patch`, must be a value less than or equal to `10`") # noqa: E501
if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] < 0: # noqa: E501
raise ApiValueError("Invalid value for parameter `depth` when calling `backupunits_patch`, must be a value greater than or equal to `0`") # noqa: E501
collection_formats = {}
path_params = {}
if 'backupunit_id' in local_var_params:
path_params['backupunitId'] = local_var_params['backupunit_id'] # noqa: E501
query_params = list(local_var_params.get('query_params', {}).items())
if 'pretty' in local_var_params and local_var_params['pretty'] is not None: # noqa: E501
query_params.append(('pretty', local_var_params['pretty'])) # noqa: E501
if 'depth' in local_var_params and local_var_params['depth'] is not None: # noqa: E501
query_params.append(('depth', local_var_params['depth'])) # noqa: E501
header_params = {}
if 'x_contract_number' in local_var_params:
header_params['X-Contract-Number'] = local_var_params['x_contract_number'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'backup_unit' in local_var_params:
body_params = local_var_params['backup_unit']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Basic Authentication', 'Token Authentication'] # noqa: E501
response_type = 'BackupUnit'
if 'response_type' in kwargs:
response_type = kwargs['response_type']
return self.api_client.call_api(
'/backupunits/{backupunitId}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=response_type, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
def backupunits_post(self, backup_unit, **kwargs):  # noqa: E501
    """Create backup units  # noqa: E501

    Create a backup unit. Backup units are resources, same as storage
    volumes or snapshots; they can be shared through groups in User
    management.

    Synchronous by default; pass async_req=True to get the request
    thread instead of the result:

    >>> thread = api.backupunits_post(backup_unit, async_req=True)
    >>> result = thread.get()

    :param backup_unit: The backup unit to create. (required)
    :type backup_unit: BackupUnit
    :param pretty: Pretty-print the JSON response (indentation and new lines).
    :type pretty: bool
    :param depth: Detail depth of the response objects (0..10); higher values
        include more levels of children.
    :type depth: int
    :param x_contract_number: Contract number; required for users with
        multiple contracts.
    :type x_contract_number: int
    :param async_req: Execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the raw urllib3.HTTPResponse is
        returned without reading/decoding the body. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: total timeout in seconds, or a
        (connection, read) tuple.
    :return: the created BackupUnit, or the request thread when
        async_req is True.
    :rtype: BackupUnit
    """
    # Delegate to the *_with_http_info variant, requesting data only
    # (no status code / headers tuple).
    kwargs.update(_return_http_data_only=True)
    return self.backupunits_post_with_http_info(backup_unit, **kwargs)  # noqa: E501
def backupunits_post_with_http_info(self, backup_unit, **kwargs):  # noqa: E501
    """Create backup units  # noqa: E501

    Create a backup unit. Backup units are resources, same as storage volumes or snapshots; they can be shared through groups in User management.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.backupunits_post_with_http_info(backup_unit, async_req=True)
    >>> result = thread.get()

    :param backup_unit: The backup unit to create. (required)
    :type backup_unit: BackupUnit
    :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
    :type pretty: bool
    :param depth: Controls the detail depth of the response objects (0..10); higher values include more levels of children.
    :type depth: int
    :param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
    :type x_contract_number: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(BackupUnit, status_code(int), headers(HTTPHeaderDict))
    """
    # NOTE: locals() is captured before any other local is bound, so it
    # contains exactly {'self', 'backup_unit', 'kwargs'}; the code below
    # relies on that — do not introduce locals above this line.
    local_var_params = locals()

    # Operation-level parameters accepted via **kwargs.
    all_params = [
        'backup_unit',
        'pretty',
        'depth',
        'x_contract_number'
    ]
    # Framework-level options also accepted via **kwargs.
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth',
            'response_type',
            'query_params'
        ]
    )

    # Reject unknown keyword arguments, then flatten kwargs into
    # local_var_params for uniform lookup below.
    for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
        if local_var_params_key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method backupunits_post" % local_var_params_key
            )
        local_var_params[local_var_params_key] = local_var_params_val
    del local_var_params['kwargs']
    # verify the required parameter 'backup_unit' is set
    if self.api_client.client_side_validation and ('backup_unit' not in local_var_params or  # noqa: E501
                                                   local_var_params['backup_unit'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `backup_unit` when calling `backupunits_post`")  # noqa: E501
    # Client-side range check: the API only accepts 0 <= depth <= 10.
    if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] > 10:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `depth` when calling `backupunits_post`, must be a value less than or equal to `10`")  # noqa: E501
    if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] < 0:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `depth` when calling `backupunits_post`, must be a value greater than or equal to `0`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    # Build the query string; caller-supplied extra 'query_params' go first.
    query_params = list(local_var_params.get('query_params', {}).items())
    if 'pretty' in local_var_params and local_var_params['pretty'] is not None:  # noqa: E501
        query_params.append(('pretty', local_var_params['pretty']))  # noqa: E501
    if 'depth' in local_var_params and local_var_params['depth'] is not None:  # noqa: E501
        query_params.append(('depth', local_var_params['depth']))  # noqa: E501

    header_params = {}
    if 'x_contract_number' in local_var_params:
        header_params['X-Contract-Number'] = local_var_params['x_contract_number']  # noqa: E501

    form_params = []
    local_var_files = {}

    # The BackupUnit payload is sent as the JSON request body.
    body_params = None
    if 'backup_unit' in local_var_params:
        body_params = local_var_params['backup_unit']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Basic Authentication', 'Token Authentication']  # noqa: E501

    # Callers may override the deserialization target via response_type.
    response_type = 'BackupUnit'
    if 'response_type' in kwargs:
        response_type = kwargs['response_type']

    return self.api_client.call_api(
        '/backupunits', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=response_type,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def backupunits_put(self, backupunit_id, backup_unit, **kwargs):  # noqa: E501
    """Modify backup units  # noqa: E501

    Modify the properties of the specified backup unit.

    Synchronous by default; pass async_req=True to get the request
    thread instead of the result:

    >>> thread = api.backupunits_put(backupunit_id, backup_unit, async_req=True)
    >>> result = thread.get()

    :param backupunit_id: The unique ID of the backup unit. (required)
    :type backupunit_id: str
    :param backup_unit: The modified backup unit. (required)
    :type backup_unit: BackupUnit
    :param pretty: Pretty-print the JSON response (indentation and new lines).
    :type pretty: bool
    :param depth: Detail depth of the response objects (0..10); higher values
        include more levels of children.
    :type depth: int
    :param x_contract_number: Contract number; required for users with
        multiple contracts.
    :type x_contract_number: int
    :param async_req: Execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the raw urllib3.HTTPResponse is
        returned without reading/decoding the body. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: total timeout in seconds, or a
        (connection, read) tuple.
    :return: the updated BackupUnit, or the request thread when
        async_req is True.
    :rtype: BackupUnit
    """
    # Delegate to the *_with_http_info variant, requesting data only
    # (no status code / headers tuple).
    kwargs.update(_return_http_data_only=True)
    return self.backupunits_put_with_http_info(backupunit_id, backup_unit, **kwargs)  # noqa: E501
def backupunits_put_with_http_info(self, backupunit_id, backup_unit, **kwargs):  # noqa: E501
    """Modify backup units  # noqa: E501

    Modify the properties of the specified backup unit.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.backupunits_put_with_http_info(backupunit_id, backup_unit, async_req=True)
    >>> result = thread.get()

    :param backupunit_id: The unique ID of the backup unit. (required)
    :type backupunit_id: str
    :param backup_unit: The modified backup unit. (required)
    :type backup_unit: BackupUnit
    :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
    :type pretty: bool
    :param depth: Controls the detail depth of the response objects (0..10); higher values include more levels of children.
    :type depth: int
    :param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
    :type x_contract_number: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(BackupUnit, status_code(int), headers(HTTPHeaderDict))
    """
    # NOTE: locals() is captured before any other local is bound, so it
    # contains exactly {'self', 'backupunit_id', 'backup_unit', 'kwargs'};
    # do not introduce locals above this line.
    local_var_params = locals()

    # Operation-level parameters accepted via **kwargs.
    all_params = [
        'backupunit_id',
        'backup_unit',
        'pretty',
        'depth',
        'x_contract_number'
    ]
    # Framework-level options also accepted via **kwargs.
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth',
            'response_type',
            'query_params'
        ]
    )

    # Reject unknown keyword arguments, then flatten kwargs into
    # local_var_params for uniform lookup below.
    for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
        if local_var_params_key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method backupunits_put" % local_var_params_key
            )
        local_var_params[local_var_params_key] = local_var_params_val
    del local_var_params['kwargs']
    # verify the required parameter 'backupunit_id' is set
    if self.api_client.client_side_validation and ('backupunit_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['backupunit_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `backupunit_id` when calling `backupunits_put`")  # noqa: E501
    # verify the required parameter 'backup_unit' is set
    if self.api_client.client_side_validation and ('backup_unit' not in local_var_params or  # noqa: E501
                                                   local_var_params['backup_unit'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `backup_unit` when calling `backupunits_put`")  # noqa: E501
    # Client-side range check: the API only accepts 0 <= depth <= 10.
    if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] > 10:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `depth` when calling `backupunits_put`, must be a value less than or equal to `10`")  # noqa: E501
    if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] < 0:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `depth` when calling `backupunits_put`, must be a value greater than or equal to `0`")  # noqa: E501

    collection_formats = {}

    # Substitute the backup unit id into the URL template.
    path_params = {}
    if 'backupunit_id' in local_var_params:
        path_params['backupunitId'] = local_var_params['backupunit_id']  # noqa: E501

    # Build the query string; caller-supplied extra 'query_params' go first.
    query_params = list(local_var_params.get('query_params', {}).items())
    if 'pretty' in local_var_params and local_var_params['pretty'] is not None:  # noqa: E501
        query_params.append(('pretty', local_var_params['pretty']))  # noqa: E501
    if 'depth' in local_var_params and local_var_params['depth'] is not None:  # noqa: E501
        query_params.append(('depth', local_var_params['depth']))  # noqa: E501

    header_params = {}
    if 'x_contract_number' in local_var_params:
        header_params['X-Contract-Number'] = local_var_params['x_contract_number']  # noqa: E501

    form_params = []
    local_var_files = {}

    # The BackupUnit payload is sent as the JSON request body.
    body_params = None
    if 'backup_unit' in local_var_params:
        body_params = local_var_params['backup_unit']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Basic Authentication', 'Token Authentication']  # noqa: E501

    # Callers may override the deserialization target via response_type.
    response_type = 'BackupUnit'
    if 'response_type' in kwargs:
        response_type = kwargs['response_type']

    return self.api_client.call_api(
        '/backupunits/{backupunitId}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=response_type,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def backupunits_ssourl_get(self, backupunit_id, **kwargs):  # noqa: E501
    """Retrieve BU single sign-on URLs  # noqa: E501

    Retrieve a single sign-on URL for the specified backup unit.

    Synchronous by default; pass async_req=True to get the request
    thread instead of the result:

    >>> thread = api.backupunits_ssourl_get(backupunit_id, async_req=True)
    >>> result = thread.get()

    :param backupunit_id: The unique ID of the backup unit. (required)
    :type backupunit_id: str
    :param pretty: Pretty-print the JSON response (indentation and new lines).
    :type pretty: bool
    :param x_contract_number: Contract number; required for users with
        multiple contracts.
    :type x_contract_number: int
    :param async_req: Execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the raw urllib3.HTTPResponse is
        returned without reading/decoding the body. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: total timeout in seconds, or a
        (connection, read) tuple.
    :return: the BackupUnitSSO object, or the request thread when
        async_req is True.
    :rtype: BackupUnitSSO
    """
    # Delegate to the *_with_http_info variant, requesting data only
    # (no status code / headers tuple).
    kwargs.update(_return_http_data_only=True)
    return self.backupunits_ssourl_get_with_http_info(backupunit_id, **kwargs)  # noqa: E501
def backupunits_ssourl_get_with_http_info(self, backupunit_id, **kwargs):  # noqa: E501
    """Retrieve BU single sign-on URLs  # noqa: E501

    Retrieve a single sign-on URL for the specified backup unit.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.backupunits_ssourl_get_with_http_info(backupunit_id, async_req=True)
    >>> result = thread.get()

    :param backupunit_id: The unique ID of the backup unit. (required)
    :type backupunit_id: str
    :param pretty: Controls whether the response is pretty-printed (with indentations and new lines).
    :type pretty: bool
    :param x_contract_number: Users with multiple contracts must provide the contract number, for which all API requests are to be executed.
    :type x_contract_number: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(BackupUnitSSO, status_code(int), headers(HTTPHeaderDict))
    """
    # NOTE: locals() is captured before any other local is bound, so it
    # contains exactly {'self', 'backupunit_id', 'kwargs'}; do not
    # introduce locals above this line.
    local_var_params = locals()

    # Operation-level parameters accepted via **kwargs.
    all_params = [
        'backupunit_id',
        'pretty',
        'x_contract_number'
    ]
    # Framework-level options also accepted via **kwargs.
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth',
            'response_type',
            'query_params'
        ]
    )

    # Reject unknown keyword arguments, then flatten kwargs into
    # local_var_params for uniform lookup below.
    for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
        if local_var_params_key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method backupunits_ssourl_get" % local_var_params_key
            )
        local_var_params[local_var_params_key] = local_var_params_val
    del local_var_params['kwargs']
    # verify the required parameter 'backupunit_id' is set
    if self.api_client.client_side_validation and ('backupunit_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['backupunit_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `backupunit_id` when calling `backupunits_ssourl_get`")  # noqa: E501

    collection_formats = {}

    # Substitute the backup unit id into the URL template.
    path_params = {}
    if 'backupunit_id' in local_var_params:
        path_params['backupunitId'] = local_var_params['backupunit_id']  # noqa: E501

    # Build the query string; caller-supplied extra 'query_params' go first.
    query_params = list(local_var_params.get('query_params', {}).items())
    if 'pretty' in local_var_params and local_var_params['pretty'] is not None:  # noqa: E501
        query_params.append(('pretty', local_var_params['pretty']))  # noqa: E501

    header_params = {}
    if 'x_contract_number' in local_var_params:
        header_params['X-Contract-Number'] = local_var_params['x_contract_number']  # noqa: E501

    form_params = []
    local_var_files = {}

    # GET request: no body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Basic Authentication', 'Token Authentication']  # noqa: E501

    # Callers may override the deserialization target via response_type.
    response_type = 'BackupUnitSSO'
    if 'response_type' in kwargs:
        response_type = kwargs['response_type']

    return self.api_client.call_api(
        '/backupunits/{backupunitId}/ssourl', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=response_type,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
| 54.940773
| 451
| 0.635347
| 7,617
| 64,006
| 5.109492
| 0.032165
| 0.048305
| 0.079498
| 0.019733
| 0.988258
| 0.985483
| 0.984635
| 0.982579
| 0.982348
| 0.977851
| 0
| 0.013687
| 0.293441
| 64,006
| 1,164
| 452
| 54.987973
| 0.846895
| 0.49436
| 0
| 0.802239
| 0
| 0.022388
| 0.227015
| 0.027568
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027985
| false
| 0
| 0.009328
| 0
| 0.065299
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
830d60415b872bf05582d251897292653a059f3b
| 41
|
py
|
Python
|
ifelse/cores.py
|
viniTWL/Python-Projects
|
1c4d2417efd896623263287b1d2391f7c551674c
|
[
"MIT"
] | 1
|
2021-10-17T13:00:19.000Z
|
2021-10-17T13:00:19.000Z
|
ifelse/cores.py
|
viniTWL/Python-Projects
|
1c4d2417efd896623263287b1d2391f7c551674c
|
[
"MIT"
] | null | null | null |
ifelse/cores.py
|
viniTWL/Python-Projects
|
1c4d2417efd896623263287b1d2391f7c551674c
|
[
"MIT"
] | null | null | null |
print('\033[1;35mTeste de Cores.\033[m')
| 20.5
| 40
| 0.682927
| 8
| 41
| 3.5
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.236842
| 0.073171
| 41
| 1
| 41
| 41
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0.756098
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
8366e97a3e7cca5b33588ab1542999cbbd54e52b
| 206
|
py
|
Python
|
data/external/repositories_2to3/197772/kaggle_grasp_and_lift_eeg_detection-master/code/nets/net_stf7c.py
|
Keesiu/meta-kaggle
|
87de739aba2399fd31072ee81b391f9b7a63f540
|
[
"MIT"
] | null | null | null |
data/external/repositories_2to3/197772/kaggle_grasp_and_lift_eeg_detection-master/code/nets/net_stf7c.py
|
Keesiu/meta-kaggle
|
87de739aba2399fd31072ee81b391f9b7a63f540
|
[
"MIT"
] | null | null | null |
data/external/repositories_2to3/197772/kaggle_grasp_and_lift_eeg_detection-master/code/nets/net_stf7c.py
|
Keesiu/meta-kaggle
|
87de739aba2399fd31072ee81b391f9b7a63f540
|
[
"MIT"
] | 1
|
2019-12-04T08:23:33.000Z
|
2019-12-04T08:23:33.000Z
|
from . import net_stf7
def create_net(train_source, test_source, **kwargs):
    # Variant of net_stf7: fixes filter0_width=21, filter1_num=128 and
    # filter2_num=256, delegating everything else to net_stf7.create_net.
    return net_stf7.create_net(train_source, test_source, filter0_width=21, filter1_num=128, filter2_num=256, **kwargs)
| 29.428571
| 120
| 0.757282
| 31
| 206
| 4.677419
| 0.612903
| 0.096552
| 0.193103
| 0.275862
| 0.413793
| 0.413793
| 0
| 0
| 0
| 0
| 0
| 0.073446
| 0.140777
| 206
| 6
| 121
| 34.333333
| 0.745763
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
366d5fa8ecf2cb8ad4cd118f25d96cc1a45e339d
| 41
|
py
|
Python
|
feeders/__init__.py
|
PINTO0309/MS-G3D
|
5f0f7740ed8543bd0e288affca2a76541c83669e
|
[
"MIT"
] | 343
|
2020-04-01T08:50:31.000Z
|
2022-03-28T15:08:18.000Z
|
feeders/__init__.py
|
PINTO0309/MS-G3D
|
5f0f7740ed8543bd0e288affca2a76541c83669e
|
[
"MIT"
] | 52
|
2020-04-16T12:00:57.000Z
|
2022-03-11T12:17:55.000Z
|
feeders/__init__.py
|
PINTO0309/MS-G3D
|
5f0f7740ed8543bd0e288affca2a76541c83669e
|
[
"MIT"
] | 89
|
2020-04-05T01:37:50.000Z
|
2022-03-17T02:37:48.000Z
|
from . import tools
from . import feeder
| 13.666667
| 20
| 0.756098
| 6
| 41
| 5.166667
| 0.666667
| 0.645161
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.195122
| 41
| 2
| 21
| 20.5
| 0.939394
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
367a3fcde189a0a27442a6951b21276a6d714094
| 3,240
|
py
|
Python
|
src/ctc/protocols/rari_utils/fuse_queries/token_state/token_interest.py
|
fei-protocol/checkthechain
|
ec838f3d0d44af228f45394d9ba8d8eb7f677520
|
[
"MIT"
] | 94
|
2022-02-15T19:34:49.000Z
|
2022-03-26T19:26:22.000Z
|
src/ctc/protocols/rari_utils/fuse_queries/token_state/token_interest.py
|
fei-protocol/checkthechain
|
ec838f3d0d44af228f45394d9ba8d8eb7f677520
|
[
"MIT"
] | 7
|
2022-03-03T02:58:47.000Z
|
2022-03-11T18:41:05.000Z
|
src/ctc/protocols/rari_utils/fuse_queries/token_state/token_interest.py
|
fei-protocol/checkthechain
|
ec838f3d0d44af228f45394d9ba8d8eb7f677520
|
[
"MIT"
] | 7
|
2022-02-15T17:53:07.000Z
|
2022-03-17T19:14:17.000Z
|
from __future__ import annotations
import typing
from ctc import rpc
from ctc import spec
from .. import irm_metadata
from .. import token_metadata
from ... import rari_abis
async def async_get_supply_interest_per_block(
    ctoken: spec.Address,
    block: spec.BlockNumberReference = 'latest',
    normalize: bool = True,
    fill_empty: bool = False,
    empty_token: typing.Any = None,
) -> int | float | None:
    """Return a cToken's per-block supply interest rate.

    Calls the contract's ``supplyRatePerBlock`` function via ``eth_call``
    at the given block.

    :param ctoken: address of the cToken contract to query
    :param block: block at which to evaluate the call (default ``'latest'``)
    :param normalize: if True, divide the raw integer rate by 1e18
    :param fill_empty: forwarded to the RPC layer; when set, an empty call
        result yields ``empty_token`` instead of raising
    :param empty_token: sentinel used for empty results when ``fill_empty``
        is set
    """
    result = await rpc.async_eth_call(
        to_address=ctoken,
        block_number=block,
        function_abi=rari_abis.ctoken_function_abis['supplyRatePerBlock'],
        fill_empty=fill_empty,
        empty_token=empty_token,
    )
    # Short-circuit: propagate an empty result without normalizing.
    if fill_empty and result is None:
        return None
    if normalize:
        # Convert the raw integer rate to a float by undoing the 1e18 scaling.
        result /= 1e18
    return result
async def async_get_borrow_interest_per_block(
    ctoken: spec.Address,
    block: spec.BlockNumberReference = 'latest',
    normalize: bool = True,
    fill_empty: bool = False,
    empty_token: typing.Any = None,
) -> int | float | None:
    """Return a cToken's per-block borrow interest rate.

    Calls the contract's ``borrowRatePerBlock`` function via ``eth_call``
    at the given block.

    :param ctoken: address of the cToken contract to query
    :param block: block at which to evaluate the call (default ``'latest'``)
    :param normalize: if True, divide the raw integer rate by 1e18
    :param fill_empty: forwarded to the RPC layer; when set, an empty call
        result yields ``empty_token`` instead of raising
    :param empty_token: sentinel used for empty results when ``fill_empty``
        is set
    """
    result = await rpc.async_eth_call(
        to_address=ctoken,
        block_number=block,
        function_abi=rari_abis.ctoken_function_abis['borrowRatePerBlock'],
        fill_empty=fill_empty,
        empty_token=empty_token,
    )
    # Short-circuit: propagate an empty result without normalizing.
    if fill_empty and result is None:
        return None
    if normalize:
        # Convert the raw integer rate to a float by undoing the 1e18 scaling.
        result /= 1e18
    return result
async def async_get_supply_apy(
    ctoken: spec.Address,
    blocks_per_year: int | None = None,
    block: spec.BlockNumberReference = 'latest',
    fill_empty: bool = False,
    empty_token: typing.Any = None,
) -> float | None:
    """Return a cToken's supply APY, compounding the per-block rate.

    :param ctoken: address of the cToken contract to query
    :param blocks_per_year: compounding periods per year; when None it is
        looked up from the token's interest rate model
    :param block: block at which to evaluate (default ``'latest'``)
    :param fill_empty: when set, return None for an empty call result
        instead of raising
    :param empty_token: sentinel forwarded to the RPC layer for empty results
    :raises Exception: if the per-block rate is unavailable and
        ``fill_empty`` is not set
    """
    supply_interest_per_block = await async_get_supply_interest_per_block(
        ctoken=ctoken,
        block=block,
        normalize=True,
        fill_empty=fill_empty,
        empty_token=empty_token,
    )
    if supply_interest_per_block is None:
        if fill_empty:
            return None
        else:
            raise Exception('could not determine supply interest per block')
    if blocks_per_year is None:
        # Derive blocks-per-year from the token's interest rate model (IRM).
        irm = await token_metadata.async_get_ctoken_irm(ctoken, block=block)
        blocks_per_year = await irm_metadata.async_get_irm_blocks_per_year(
            irm,
            block=block,
        )
    # Compound the per-block rate over one year.
    return (1 + supply_interest_per_block) ** blocks_per_year - 1
async def async_get_borrow_apy(
    ctoken: spec.Address,
    blocks_per_year: int | None = None,
    block: spec.BlockNumberReference = 'latest',
    fill_empty: bool = False,
    empty_token: typing.Any = None,
) -> float | None:
    """Return a cToken's borrow APY, compounding the per-block rate.

    :param ctoken: address of the cToken contract to query
    :param blocks_per_year: compounding periods per year; when None it is
        looked up from the token's interest rate model
    :param block: block at which to evaluate (default ``'latest'``)
    :param fill_empty: when set, return None for an empty call result
        instead of raising
    :param empty_token: sentinel forwarded to the RPC layer for empty results
    :raises Exception: if the per-block rate is unavailable and
        ``fill_empty`` is not set
    """
    borrow_interest_per_block = await async_get_borrow_interest_per_block(
        ctoken=ctoken,
        block=block,
        normalize=True,
        fill_empty=fill_empty,
        empty_token=empty_token,
    )
    if borrow_interest_per_block is None:
        if fill_empty:
            return None
        else:
            # fix: the message previously said "supply" (copy-paste from
            # async_get_supply_apy) although this is the borrow-side lookup.
            raise Exception('could not determine borrow interest per block')
    if blocks_per_year is None:
        # Derive blocks-per-year from the token's interest rate model (IRM).
        irm = await token_metadata.async_get_ctoken_irm(ctoken, block=block)
        blocks_per_year = await irm_metadata.async_get_irm_blocks_per_year(
            irm,
            block=block,
        )
    # Compound the per-block rate over one year.
    return (1 + borrow_interest_per_block) ** blocks_per_year - 1
| 28.421053
| 76
| 0.669444
| 410
| 3,240
| 4.970732
| 0.146341
| 0.070658
| 0.09421
| 0.075564
| 0.898921
| 0.881256
| 0.860648
| 0.807655
| 0.807655
| 0.807655
| 0
| 0.004156
| 0.257407
| 3,240
| 113
| 77
| 28.672566
| 0.842893
| 0
| 0
| 0.742268
| 0
| 0
| 0.046311
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.072165
| 0
| 0.154639
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
36a1e52c92505f83696873ce0f65bc084edf8908
| 6,185
|
py
|
Python
|
Obfuscated_code_or_piece_of_art.py
|
IEWbgfnYDwHRoRRSKtkdyMDUzgdwuBYgDKtDJWd/narnt
|
0eda13a7b8663e218b4fe2e06a974b99db9ff166
|
[
"MIT"
] | 102
|
2016-06-25T09:30:00.000Z
|
2022-03-24T21:02:49.000Z
|
Obfuscated_code_or_piece_of_art.py
|
operade/notebooks
|
56f97e33e81b5e86905961b09184a41b7616fa90
|
[
"MIT"
] | 34
|
2016-06-26T12:21:30.000Z
|
2021-04-06T09:19:49.000Z
|
Obfuscated_code_or_piece_of_art.py
|
operade/notebooks
|
56f97e33e81b5e86905961b09184a41b7616fa90
|
[
"MIT"
] | 44
|
2017-05-13T23:54:56.000Z
|
2021-07-17T15:34:24.000Z
|
# coding: utf-8
# # Table of Contents
# <p><div class="lev1 toc-item"><a href="#Obfuscated-code-or-piece-of-art?" data-toc-modified-id="Obfuscated-code-or-piece-of-art?-1"><span class="toc-item-num">1 </span>Obfuscated code or piece of art?</a></div><div class="lev2 toc-item"><a href="#Mandelbrot-set" data-toc-modified-id="Mandelbrot-set-11"><span class="toc-item-num">1.1 </span>Mandelbrot set</a></div><div class="lev2 toc-item"><a href="#Penrose-patterns" data-toc-modified-id="Penrose-patterns-12"><span class="toc-item-num">1.2 </span>Penrose patterns</a></div><div class="lev2 toc-item"><a href="#Bitcoin-address-&-private-key-generator" data-toc-modified-id="Bitcoin-address-&-private-key-generator-13"><span class="toc-item-num">1.3 </span>Bitcoin address & private key generator</a></div>
# # Obfuscated code or piece of art?
#
# This short notebooks shows a few examples of Python code, designed to draw something, and shaped as what they will draw...
# ## Mandelbrot set
#
# This nice little code will write a visualization of the [Mandelbrot set](https://en.wikipedia.org/wiki/Mandelbrot_set), on the domain $[-3, 3] \times [-3i, 3i]$, for $1500 \times 1500$ points, as a Bitmap (written manually in binary).
# In[2]:
get_ipython().run_cell_magic('time', '', "b_ = (\n 255,\n lambda\n V ,B,c\n :c and Y(V*V+B,B, c\n -1)if(abs(V)<6)else\n ( 2+c-4*abs(V)**-0.4)/i\n ) ;v, x=1500,1000;C=range(v*x\n );import struct;P=struct.pack;M,\\\n j ='<QIIHHHH',open('art/M.bmp','wb').write\nfor X in j('BM'+P(M,v*x*3+26,26,12,v,x,1,24))or C:\n i ,Y=_;j(P('BBB',*(lambda T:(T*80+T**9\n *i-950*T **99,T*70-880*T**18+701*\n T **9 ,T*i**(1-T**45*2)))(sum(\n [ Y(0,(A%3/3.+X%v+(X/v+\n A/3/3.-x/2)/1j)*2.5\n /x -2.7,i)**2 for \\\n A in C\n [:9]])\n /9)\n ) )")
# 
# ----
#
# ## Penrose patterns
#
# This second nice little code will write a visualization of a [Penrose tiling (infinite pattern)](https://en.wikipedia.org/wiki/Penrose_tiling) to a PNG image, of resolution $2000 \times 2000$.
# In[3]:
get_ipython().run_cell_magic('time', '', '_ =\\\n """if!\n 1:"e,V=200\n 0,(0j-1)**-.2;\n v,S=.5/ V.real,\n [(0,0,4 *e,4*e*\n V)];w=1 -v"def!\n E(T,A, B,C):P\n ,Q,R=B*w+ A*v,B*w+C\n *v,A*w+B*v;retur n[(1,Q,C,A),(1,P\n ,Q,B),(0,Q,P,A)]*T+[(0,C ,R,B),(1,R,C,A)]*(1-T)"f\nor!i!in!_[:11]:S =sum([E (*x)for !x!in!S],[])"imp\n ort!cair o!as!O; s=O.Ima geSurfac\n e(1,e,e) ;c=O.Con text(s); M,L,G=c.\n move_to ,c.line_to,c.s et_sour\n ce_rgb a"def!z(f,a) :f(-a.\n imag,a. real-e-e)"for!T,A,B,C!in[i !for!i!\n in!S!if!i[""";exec(reduce(lambda x,i:x.replace(chr\n (i),"\\n "[34-i:]), range( 35),_+"""0]]:z(M,A\n );z(L,B);z (L,C); c.close_pa\n th()"G (.4,.3 ,1);c.\n paint( );G(.7 ,.7,1)\n ;c.fil l()"fo r!i!in\n !range (9):"! g=1-i/\n 8;d=i/ 4*g;G(d,d,d, 1-g*.8\n )"!def !y(f,a):z(f,a+(1+2j)*( 1j**(i\n /2.))*g)"!for!T,A,B,C!in!S:y(M,C);y(L,A);y(M\n ,A);y(L,B)"!c.st roke()"s.write_t\n o_png(\'art/ penrose.png\')\n """ ))')
# 
# ----
#
# ## Bitcoin address & private key generator
#
# This is the most concise (and the most sexy!) implementation of the Bitcoin protocol to generate a new address and private key!
# In[26]:
get_ipython().run_cell_magic('time', '', '_ =r"""A(W/2,*M(3*G\n *G*V(2*J%P),G,J,G)+((M((J-T\n )*V((G-S)%P),S,T,G)if(S@(G,J))if(\n W%2@(S,T)))if(W@(S,T);H=2**256;import&h\n ashlib&as&h,os,re,bi nascii&as&k;J$:int(\n k.b2a_hex(W),16);C$:C (W/ 58)+[W%58]if(W@\n [];X=h.new("rip em d160");Y$:h.sha25\n 6(W).digest();I$ d=32:I(W/256,d-1)+\n chr(W%256)if(d>0@""; U$:J(k.a2b_base\n 64(W));f=J(os.urando m(64)) %(H-U("AUVRIxl\nQt1/EQC2hcy/JvsA="))+ 1;M$Q,R,G :((W*W-Q-G)%P,\n(W*(G+2*Q-W*W)-R)%P) ;P=H-2** 32-977;V$Q=P,L=\n1,O=0:V(Q%W,W,O-Q/W* L,L)if(W@O%P;S,\nT=A(f,U("eb5mfvncu6 xVoGKVzocLBwKb/Nst\nzijZWfKBWxb4F5g="), U("SDra dyajxGVdpPv8DhEI\nqP0XtEimhVQZnEfQj/ sQ1Lg="), 0,0);F$:"1"+F(W\n [1:])if(W[:1 ]=="\\0"@"" .join(map(B,C(\n J(W))));K$: F(W +Y(Y(W))[:4]);\n X.update(Y("\\4"+ I(S)+I(T)));B$\n :re.sub("[0OIl _]| [^\\\\w]","","".jo\n in(map(chr,ra nge (123))))[W];print"Addre\n ss:",K("\\0"+X.dig est())+"\\nPrivkey:",K(\n "\\x80"+I(f))""";exec(reduce(lambda W,X:\n W.replace(*X),zip(" \\n&$@",["","",\n " ","=lambda W,",")else "])\n ,"A$G,J,S,T:"+_))')
# ----
#
# Disclaimer: I am *not* the author of these small examples!
#
# > That's it for today!
| 114.537037
| 1,761
| 0.420372
| 1,009
| 6,185
| 2.549058
| 0.25669
| 0.021773
| 0.012442
| 0.018663
| 0.231726
| 0.213841
| 0.062986
| 0.062986
| 0.032659
| 0
| 0
| 0.055808
| 0.359741
| 6,185
| 53
| 1,762
| 116.698113
| 0.593687
| 0.29038
| 0
| 0
| 0
| 1
| 0.95168
| 0.216981
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0.333333
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
36aea0f0c9a9e81b530163f675635edb2dfc07c6
| 107,131
|
py
|
Python
|
docusign_rooms/apis/rooms_api.py
|
docusign/docusign-rooms-python-client
|
eb8cfd0e487a87bc43d54bfa9de784eb170a40d3
|
[
"MIT"
] | 2
|
2020-11-11T01:38:45.000Z
|
2021-11-14T17:00:43.000Z
|
docusign_rooms/apis/rooms_api.py
|
docusign/docusign-rooms-python-client
|
eb8cfd0e487a87bc43d54bfa9de784eb170a40d3
|
[
"MIT"
] | null | null | null |
docusign_rooms/apis/rooms_api.py
|
docusign/docusign-rooms-python-client
|
eb8cfd0e487a87bc43d54bfa9de784eb170a40d3
|
[
"MIT"
] | 1
|
2021-11-14T17:00:32.000Z
|
2021-11-14T17:00:32.000Z
|
# coding: utf-8
"""
DocuSign Rooms API - v2
An API for an integrator to access the features of DocuSign Rooms # noqa: E501
OpenAPI spec version: v2
Contact: devcenter@docusign.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..client.configuration import Configuration
from ..client.api_client import ApiClient
class RoomsApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """
    Create a RoomsApi bound to an ApiClient.

    :param ApiClient api_client: client to use for all HTTP calls; when
        omitted, the shared Configuration's client is used (and created
        on first use).
    """
    config = Configuration()
    if not api_client:
        # No client supplied: lazily create and cache a default client
        # on the shared configuration, then adopt it.
        if not config.api_client:
            config.api_client = ApiClient()
        api_client = config.api_client
    self.api_client = api_client
def add_document_to_room(self, room_id, account_id, **kwargs):
    """
    Add a document to a room.

    Synchronous by default; pass a `callback` function keyword argument
    to receive the response asynchronously on a worker thread.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.add_document_to_room(room_id, account_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int room_id: The id of the room. (required)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :param Document body:
    :return: RoomDocument
        If the method is called asynchronously,
        returns the request thread.
    """
    # Force data-only responses, then delegate.  With a callback the
    # detail method returns the request thread, otherwise the parsed
    # response data — in both cases exactly what this wrapper returns,
    # so no branching is needed here.
    kwargs['_return_http_data_only'] = True
    return self.add_document_to_room_with_http_info(room_id, account_id, **kwargs)
def add_document_to_room_with_http_info(self, room_id, account_id, **kwargs):
    """
    Add a document to a room.
    Add a document to a room
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.add_document_to_room_with_http_info(room_id, account_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int room_id: The id of the room. (required)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :param Document body:
    :return: RoomDocument
        If the method is called asynchronously,
        returns the request thread.
    """
    # Endpoint kwargs plus the internal control kwargs accepted by every
    # generated method.
    all_params = ['room_id', 'account_id', 'body']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the local namespace (self, room_id, account_id, kwargs,
    # all_params) so every argument is reachable by name via `params`.
    # Must stay directly after the all_params setup, before any other
    # local is introduced.
    params = locals()
    # Fold validated kwargs into `params`, rejecting unknown names.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_document_to_room" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'room_id' is set
    if ('room_id' not in params) or (params['room_id'] is None):
        raise ValueError("Missing the required parameter `room_id` when calling `add_document_to_room`")
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `add_document_to_room`")

    collection_formats = {}
    resource_path = '/v2/accounts/{accountId}/rooms/{roomId}/documents'.replace('{format}', 'json')
    # Map python_case arguments onto the camelCase path placeholders.
    path_params = {}
    if 'room_id' in params:
        path_params['roomId'] = params['room_id']
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # The optional Document payload is sent as the JSON request body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['text/plain', 'application/json', 'text/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json-patch+json', 'application/json', 'text/json', 'application/*+json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='RoomDocument',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def add_document_to_room_via_file_upload(self, room_id, account_id, **kwargs):
    """
    Add a document to a room via file contents upload.

    This method uploads the contents of file as a room document for the
    room that you specify.  Synchronous by default; pass a `callback`
    function keyword argument for an asynchronous request.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.add_document_to_room_via_file_upload(room_id, account_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int room_id: The id of the room. (required)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :param file file:
    :return: RoomDocument
        If the method is called asynchronously,
        returns the request thread.
    """
    # Force data-only responses, then delegate; the detail method
    # returns the thread (async) or the data (sync) itself.
    kwargs['_return_http_data_only'] = True
    return self.add_document_to_room_via_file_upload_with_http_info(room_id, account_id, **kwargs)
def add_document_to_room_via_file_upload_with_http_info(self, room_id, account_id, **kwargs):
    """
    Add a document to a room via file contents upload.
    This method uploads the contents of file as a room document for the room that you specify.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.add_document_to_room_via_file_upload_with_http_info(room_id, account_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int room_id: The id of the room. (required)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :param file file:
    :return: RoomDocument
        If the method is called asynchronously,
        returns the request thread.
    """
    # Endpoint kwargs plus the internal control kwargs accepted by every
    # generated method.
    all_params = ['room_id', 'account_id', 'file']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the local namespace so every argument is reachable by
    # name via `params`; must stay before any other local is introduced.
    params = locals()
    # Fold validated kwargs into `params`, rejecting unknown names.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_document_to_room_via_file_upload" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'room_id' is set
    if ('room_id' not in params) or (params['room_id'] is None):
        raise ValueError("Missing the required parameter `room_id` when calling `add_document_to_room_via_file_upload`")
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `add_document_to_room_via_file_upload`")

    collection_formats = {}
    resource_path = '/v2/accounts/{accountId}/rooms/{roomId}/documents/contents'.replace('{format}', 'json')
    # Map python_case arguments onto the camelCase path placeholders.
    path_params = {}
    if 'room_id' in params:
        path_params['roomId'] = params['room_id']
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # The upload travels as multipart form data, not as a JSON body.
    if 'file' in params:
        local_var_files['file'] = params['file']
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['text/plain', 'application/json', 'text/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['multipart/form-data'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='RoomDocument',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def add_form_to_room(self, room_id, account_id, **kwargs):
    """
    Adds a DocuSign Form to a room.

    Synchronous by default; pass a `callback` function keyword argument
    to receive the response asynchronously on a worker thread.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.add_form_to_room(room_id, account_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int room_id: Id of the room to which the DocuSign Form is being added (required)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :param FormForAdd body:
    :return: RoomDocument
        If the method is called asynchronously,
        returns the request thread.
    """
    # Force data-only responses, then delegate; the detail method
    # returns the thread (async) or the data (sync) itself.
    kwargs['_return_http_data_only'] = True
    return self.add_form_to_room_with_http_info(room_id, account_id, **kwargs)
def add_form_to_room_with_http_info(self, room_id, account_id, **kwargs):
    """
    Adds a DocuSign Form to a room
    Adds a form to a room.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.add_form_to_room_with_http_info(room_id, account_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int room_id: Id of the room to which the DocuSign Form is being added (required)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :param FormForAdd body:
    :return: RoomDocument
        If the method is called asynchronously,
        returns the request thread.
    """
    # Endpoint kwargs plus the internal control kwargs accepted by every
    # generated method.
    all_params = ['room_id', 'account_id', 'body']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the local namespace so every argument is reachable by
    # name via `params`; must stay before any other local is introduced.
    params = locals()
    # Fold validated kwargs into `params`, rejecting unknown names.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_form_to_room" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'room_id' is set
    if ('room_id' not in params) or (params['room_id'] is None):
        raise ValueError("Missing the required parameter `room_id` when calling `add_form_to_room`")
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `add_form_to_room`")

    collection_formats = {}
    resource_path = '/v2/accounts/{accountId}/rooms/{roomId}/forms'.replace('{format}', 'json')
    # Map python_case arguments onto the camelCase path placeholders.
    path_params = {}
    if 'room_id' in params:
        path_params['roomId'] = params['room_id']
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # The optional FormForAdd payload is sent as the JSON request body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['text/plain', 'application/json', 'text/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json-patch+json', 'application/json', 'text/json', 'application/*+json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='RoomDocument',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def create_room(self, account_id, **kwargs):
    """
    Creates a new Room.

    Synchronous by default; pass a `callback` function keyword argument
    to receive the response asynchronously on a worker thread.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.create_room(account_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :param RoomForCreate body:
    :return: Room
        If the method is called asynchronously,
        returns the request thread.
    """
    # Force data-only responses, then delegate; the detail method
    # returns the thread (async) or the data (sync) itself.
    kwargs['_return_http_data_only'] = True
    return self.create_room_with_http_info(account_id, **kwargs)
def create_room_with_http_info(self, account_id, **kwargs):
    """
    Creates a new Room.

    Synchronous by default; pass a `callback` function keyword argument
    to receive the response asynchronously on a worker thread.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.create_room_with_http_info(account_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :param RoomForCreate body:
    :return: Room
        If the method is called asynchronously,
        returns the request thread.
    """
    # Endpoint kwargs plus the internal control kwargs accepted by every
    # generated method.
    all_params = ['account_id', 'body', 'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']

    # Snapshot the local namespace so every argument is reachable by
    # name; then fold the validated kwargs into the same mapping.
    params = locals()
    for name, value in iteritems(params['kwargs']):
        if name not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_room" % name
            )
        params[name] = value
    del params['kwargs']

    # verify the required parameter 'account_id' is set
    if params.get('account_id') is None:
        raise ValueError("Missing the required parameter `account_id` when calling `create_room`")

    collection_formats = {}
    resource_path = '/v2/accounts/{accountId}/rooms'.replace('{format}', 'json')
    path_params = {'accountId': params['account_id']}
    query_params = {}
    form_params = []
    local_var_files = {}
    # The optional RoomForCreate payload is sent as the JSON body.
    body_params = params.get('body')

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['text/plain', 'application/json', 'text/json']),
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json-patch+json', 'application/json', 'text/json', 'application/*+json']),
    }

    # Authentication setting
    auth_settings = []

    return self.api_client.call_api(
        resource_path, 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Room',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_room(self, room_id, account_id, **kwargs):
    """
    Deletes the room having the given room ID.

    Synchronous by default; pass a `callback` function keyword argument
    to receive the response asynchronously on a worker thread.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_room(room_id, account_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int room_id: ID of the room to be deleted. (required)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Force data-only responses, then delegate; the detail method
    # returns the thread (async) or the data (sync) itself.
    kwargs['_return_http_data_only'] = True
    return self.delete_room_with_http_info(room_id, account_id, **kwargs)
def delete_room_with_http_info(self, room_id, account_id, **kwargs):
    """
    Deletes the room having the given room ID.
    Deletes the room having the given room ID.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_room_with_http_info(room_id, account_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int room_id: ID of the room to be deleted. (required)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Endpoint kwargs plus the internal control kwargs accepted by every
    # generated method.
    all_params = ['room_id', 'account_id']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the local namespace so every argument is reachable by
    # name via `params`; must stay before any other local is introduced.
    params = locals()
    # Fold validated kwargs into `params`, rejecting unknown names.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_room" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'room_id' is set
    if ('room_id' not in params) or (params['room_id'] is None):
        raise ValueError("Missing the required parameter `room_id` when calling `delete_room`")
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `delete_room`")

    collection_formats = {}
    resource_path = '/v2/accounts/{accountId}/rooms/{roomId}'.replace('{format}', 'json')
    # Map python_case arguments onto the camelCase path placeholders.
    path_params = {}
    if 'room_id' in params:
        path_params['roomId'] = params['room_id']
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # DELETE sends no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['text/plain', 'application/json', 'text/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json-patch+json', 'application/json', 'text/json', 'application/*+json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def get_assignable_roles(self, room_id, account_id, **kwargs):
    """
    Returns the roles for which the calling user, based on their role within the room, can assign to invitees.

    This method returns the room-level roles that the current user can
    assign to the members that they invite to a room.  Synchronous by
    default; pass a `callback` function keyword argument to receive the
    response asynchronously on a worker thread.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_assignable_roles(room_id, account_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int room_id: The id of the room. (required)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :param str assignee_email: Optional parameter indicating to only return roles (internal/external) assignable to this user.
    :param str filter: A search filter that returns assignable roles by the beginning of the role name.
    :param int start_position: The index position within the total result set from which to start returning values. The default value is `0`.
    :param int count: The number of results to return. This value must be a number between `1` and `100` (default).
    :return: AssignableRoles
        If the method is called asynchronously,
        returns the request thread.
    """
    # Force data-only responses, then delegate; the detail method
    # returns the thread (async) or the data (sync) itself.
    kwargs['_return_http_data_only'] = True
    return self.get_assignable_roles_with_http_info(room_id, account_id, **kwargs)
def get_assignable_roles_with_http_info(self, room_id, account_id, **kwargs):
    """
    Returns the roles for which the calling user, based on their role within the room, can assign to invitees.
    This method returns the room-level roles that the current user can assign to the members that they invite to a room.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_assignable_roles_with_http_info(room_id, account_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int room_id: The id of the room. (required)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :param str assignee_email: Optional parameter indicating to only return roles (internal/external) assignable to this user.
    :param str filter: A search filter that returns assignable roles by the beginning of the role name.
    :param int start_position: The index position within the total result set from which to start returning values. The default value is `0`.
    :param int count: The number of results to return. This value must be a number between `1` and `100` (default).
    :return: AssignableRoles
        If the method is called asynchronously,
        returns the request thread.
    """
    # Endpoint kwargs plus the internal control kwargs accepted by every
    # generated method.
    all_params = ['room_id', 'account_id', 'assignee_email', 'filter', 'start_position', 'count']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the local namespace so every argument is reachable by
    # name via `params`; must stay before any other local is introduced.
    params = locals()
    # Fold validated kwargs into `params`, rejecting unknown names.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_assignable_roles" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'room_id' is set
    if ('room_id' not in params) or (params['room_id'] is None):
        raise ValueError("Missing the required parameter `room_id` when calling `get_assignable_roles`")
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `get_assignable_roles`")

    collection_formats = {}
    resource_path = '/v2/accounts/{accountId}/rooms/{roomId}/assignable_roles'.replace('{format}', 'json')
    # Map python_case arguments onto the camelCase path placeholders.
    path_params = {}
    if 'room_id' in params:
        path_params['roomId'] = params['room_id']
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    # Optional filters are passed through as camelCase query parameters,
    # only when the caller actually supplied them.
    query_params = {}
    if 'assignee_email' in params:
        query_params['assigneeEmail'] = params['assignee_email']
    if 'filter' in params:
        query_params['filter'] = params['filter']
    if 'start_position' in params:
        query_params['startPosition'] = params['start_position']
    if 'count' in params:
        query_params['count'] = params['count']
    header_params = {}
    form_params = []
    local_var_files = {}
    # GET sends no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['text/plain', 'application/json', 'text/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json-patch+json', 'application/json', 'text/json', 'application/*+json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='AssignableRoles',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def get_documents(self, room_id, account_id, **kwargs):
    """
    Get documents in the room accessible to the calling user.

    This method returns a list of documents that the current user can
    access for a specific room.  Synchronous by default; pass a
    `callback` function keyword argument to receive the response
    asynchronously on a worker thread.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_documents(room_id, account_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int room_id: The id of the room. (required)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :param int count: Number of documents to return. Defaults to the maximum which is 100.
    :param int start_position: Position of the first item in the total results. Defaults to 0.
    :param bool require_content_for_dynamic_documents: Filter out any isDynamic documents without content. Defaults to false.
    :param int room_folder_id: Filter documents by folderId. Defaults to null, to not filter on folderId.
    :param str name_filter: Filter documents where Name contains the filter. Defaults to null, to not filter.
    :param bool include_archived: Filter documents to have the same isArchived value as includeArchived. Defaults to true, to include archived documents.
    :return: RoomDocumentList
        If the method is called asynchronously,
        returns the request thread.
    """
    # Force data-only responses, then delegate; the detail method
    # returns the thread (async) or the data (sync) itself.
    kwargs['_return_http_data_only'] = True
    return self.get_documents_with_http_info(room_id, account_id, **kwargs)
def get_documents_with_http_info(self, room_id, account_id, **kwargs):
    """
    Get documents in the room accessible to the calling user.
    This method returns a list of documents that the current user can access for a specific room.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_documents_with_http_info(room_id, account_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int room_id: The id of the room. (required)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :param int count: Number of documents to return. Defaults to the maximum which is 100.
    :param int start_position: Position of the first item in the total results. Defaults to 0.
    :param bool require_content_for_dynamic_documents: Filter out any isDynamic documents without content. Defaults to false.
    :param int room_folder_id: Filter documents by folderId. Defaults to null, to not filter on folderId.
    :param str name_filter: Filter documents where Name contains the filter. Defaults to null, to not filter.
    :param bool include_archived: Filter documents to have the same isArchived value as includeArchived. Defaults to true, to include archived documents.
    :return: RoomDocumentList
        If the method is called asynchronously,
        returns the request thread.
    """
    # Endpoint kwargs plus the internal control kwargs accepted by every
    # generated method.
    all_params = ['room_id', 'account_id', 'count', 'start_position', 'require_content_for_dynamic_documents', 'room_folder_id', 'name_filter', 'include_archived']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the local namespace so every argument is reachable by
    # name via `params`; must stay before any other local is introduced.
    params = locals()
    # Fold validated kwargs into `params`, rejecting unknown names.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_documents" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'room_id' is set
    if ('room_id' not in params) or (params['room_id'] is None):
        raise ValueError("Missing the required parameter `room_id` when calling `get_documents`")
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `get_documents`")

    collection_formats = {}
    resource_path = '/v2/accounts/{accountId}/rooms/{roomId}/documents'.replace('{format}', 'json')
    # Map python_case arguments onto the camelCase path placeholders.
    path_params = {}
    if 'room_id' in params:
        path_params['roomId'] = params['room_id']
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    # Optional filters are passed through as camelCase query parameters,
    # only when the caller actually supplied them.
    query_params = {}
    if 'count' in params:
        query_params['count'] = params['count']
    if 'start_position' in params:
        query_params['startPosition'] = params['start_position']
    if 'require_content_for_dynamic_documents' in params:
        query_params['requireContentForDynamicDocuments'] = params['require_content_for_dynamic_documents']
    if 'room_folder_id' in params:
        query_params['roomFolderId'] = params['room_folder_id']
    if 'name_filter' in params:
        query_params['nameFilter'] = params['name_filter']
    if 'include_archived' in params:
        query_params['includeArchived'] = params['include_archived']
    header_params = {}
    form_params = []
    local_var_files = {}
    # GET sends no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['text/plain', 'application/json', 'text/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json-patch+json', 'application/json', 'text/json', 'application/*+json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='RoomDocumentList',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def get_room(self, room_id, account_id, **kwargs):
    """
    Gets information about the given room.

    Returns details about the given room. The request is synchronous by
    default; supply a `callback` keyword argument to receive the response
    asynchronously via a request thread.

    :param callback function: The callback function for asynchronous request. (optional)
    :param int room_id: The id of the room. (required)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :param bool include_field_data: Indicates if field data (a.k.a., room details) should be included in the response. Defaults to false.
    :return: Room, or the request thread when called asynchronously.
    """
    # Request only the deserialized payload. With a callback the helper
    # returns the request thread, without one it returns the data —
    # either way the helper's result is exactly what we hand back.
    kwargs['_return_http_data_only'] = True
    return self.get_room_with_http_info(room_id, account_id, **kwargs)
def get_room_with_http_info(self, room_id, account_id, **kwargs):
    """
    Gets information about the given room.

    Returns details about the given room, including the full HTTP
    response data. Synchronous by default; supply a `callback` keyword
    argument to receive the response asynchronously.

    :param callback function: The callback function for asynchronous request. (optional)
    :param int room_id: The id of the room. (required)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :param bool include_field_data: Indicates if field data (a.k.a., room details) should be included in the response. Defaults to false.
    :return: Room
        If the method is called asynchronously, returns the request thread.
    """
    # Keyword names accepted by this endpoint plus the client plumbing.
    recognized = frozenset([
        'room_id', 'account_id', 'include_field_data',
        'callback', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ])
    params = {'room_id': room_id, 'account_id': account_id}
    for name, value in kwargs.items():
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s' to method get_room" % name)
        params[name] = value
    # Both path parameters are mandatory and may not be None.
    for required in ('room_id', 'account_id'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling `get_room`" % required)

    resource_path = '/v2/accounts/{accountId}/rooms/{roomId}'.replace('{format}', 'json')
    path_params = {'roomId': params['room_id'],
                   'accountId': params['account_id']}
    query_params = {}
    if 'include_field_data' in params:
        # snake_case keyword -> camelCase query-string name
        query_params['includeFieldData'] = params['include_field_data']
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['text/plain', 'application/json', 'text/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json-patch+json', 'application/json',
             'text/json', 'application/*+json']),
    }
    return self.api_client.call_api(
        resource_path, 'GET', path_params, query_params, header_params,
        body=None,
        post_params=[],
        files={},
        response_type='Room',
        auth_settings=[],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_room_field_data(self, room_id, account_id, **kwargs):
    """
    Returns the FieldData associated with the provided roomId.

    Returns the field data associated with a room — the information that
    appears on the room's **Details** tab. Synchronous by default;
    supply a `callback` keyword argument for an asynchronous request.

    :param callback function: The callback function for asynchronous request. (optional)
    :param int room_id: The id of the room. (required)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :return: FieldData, or the request thread when called asynchronously.
    """
    # Only the deserialized payload is wanted; the helper returns either
    # the data or the request thread, which we pass straight through.
    kwargs['_return_http_data_only'] = True
    return self.get_room_field_data_with_http_info(room_id, account_id, **kwargs)
def get_room_field_data_with_http_info(self, room_id, account_id, **kwargs):
    """
    Returns the FieldData associated with the provided roomId.

    Returns the field data associated with a room — the information that
    appears on the room's **Details** tab — including the full HTTP
    response data. Synchronous by default; supply a `callback` keyword
    argument for an asynchronous request.

    :param callback function: The callback function for asynchronous request. (optional)
    :param int room_id: The id of the room. (required)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :return: FieldData
        If the method is called asynchronously, returns the request thread.
    """
    recognized = frozenset([
        'room_id', 'account_id',
        'callback', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ])
    params = {'room_id': room_id, 'account_id': account_id}
    for name, value in kwargs.items():
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s' to method get_room_field_data" % name)
        params[name] = value
    # Both path parameters are mandatory and may not be None.
    for required in ('room_id', 'account_id'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling `get_room_field_data`" % required)

    resource_path = '/v2/accounts/{accountId}/rooms/{roomId}/field_data'.replace('{format}', 'json')
    path_params = {'roomId': params['room_id'],
                   'accountId': params['account_id']}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['text/plain', 'application/json', 'text/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json-patch+json', 'application/json',
             'text/json', 'application/*+json']),
    }
    return self.api_client.call_api(
        resource_path, 'GET', path_params, {}, header_params,
        body=None,
        post_params=[],
        files={},
        response_type='FieldData',
        auth_settings=[],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_room_field_set(self, room_id, account_id, **kwargs):
    """
    Gets the field set associated with the room.

    Returns the field set that a room uses. Synchronous by default;
    supply a `callback` keyword argument for an asynchronous request.

    :param callback function: The callback function for asynchronous request. (optional)
    :param int room_id: The id of the room. (required)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :return: FieldSet, or the request thread when called asynchronously.
    """
    # Only the deserialized payload is wanted; the helper returns either
    # the data or the request thread, which we pass straight through.
    kwargs['_return_http_data_only'] = True
    return self.get_room_field_set_with_http_info(room_id, account_id, **kwargs)
def get_room_field_set_with_http_info(self, room_id, account_id, **kwargs):
    """
    Gets the field set associated with the room.

    Returns the field set that a room uses, including the full HTTP
    response data. Synchronous by default; supply a `callback` keyword
    argument for an asynchronous request.

    :param callback function: The callback function for asynchronous request. (optional)
    :param int room_id: The id of the room. (required)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :return: FieldSet
        If the method is called asynchronously, returns the request thread.
    """
    recognized = frozenset([
        'room_id', 'account_id',
        'callback', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ])
    params = {'room_id': room_id, 'account_id': account_id}
    for name, value in kwargs.items():
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s' to method get_room_field_set" % name)
        params[name] = value
    # Both path parameters are mandatory and may not be None.
    for required in ('room_id', 'account_id'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling `get_room_field_set`" % required)

    resource_path = '/v2/accounts/{accountId}/rooms/{roomId}/field_set'.replace('{format}', 'json')
    path_params = {'roomId': params['room_id'],
                   'accountId': params['account_id']}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['text/plain', 'application/json', 'text/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json-patch+json', 'application/json',
             'text/json', 'application/*+json']),
    }
    return self.api_client.call_api(
        resource_path, 'GET', path_params, {}, header_params,
        body=None,
        post_params=[],
        files={},
        response_type='FieldSet',
        auth_settings=[],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_room_users(self, room_id, account_id, **kwargs):
    """
    Retrieves the list of users in the given room.

    Returns a list of users associated with a room. Synchronous by
    default; supply a `callback` keyword argument for an asynchronous
    request.

    :param callback function: The callback function for asynchronous request. (optional)
    :param int room_id: The id of the room. (required)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :param int count: Number of room users to return. Defaults to the maximum which is 100.
    :param int start_position: Position of the first item in the total results. Defaults to 0.
    :param str filter: Returns room users filtered by Name and Email.
    :param str sort: Sorts results. Options are FirstNameAsc, FirstNameDesc, LastNameAsc, LastNameDesc, EmailAsc, EmailDesc. Defaults to LastNameDesc
    :return: RoomUsersResult, or the request thread when called asynchronously.
    """
    # Only the deserialized payload is wanted; the helper returns either
    # the data or the request thread, which we pass straight through.
    kwargs['_return_http_data_only'] = True
    return self.get_room_users_with_http_info(room_id, account_id, **kwargs)
def get_room_users_with_http_info(self, room_id, account_id, **kwargs):
    """
    Retrieves the list of users in the given room.

    Returns a list of users associated with a room, including the full
    HTTP response data. Synchronous by default; supply a `callback`
    keyword argument for an asynchronous request.

    :param callback function: The callback function for asynchronous request. (optional)
    :param int room_id: The id of the room. (required)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :param int count: Number of room users to return. Defaults to the maximum which is 100.
    :param int start_position: Position of the first item in the total results. Defaults to 0.
    :param str filter: Returns room users filtered by Name and Email.
    :param str sort: Sorts results. Options are FirstNameAsc, FirstNameDesc, LastNameAsc, LastNameDesc, EmailAsc, EmailDesc. Defaults to LastNameDesc
    :return: RoomUsersResult
        If the method is called asynchronously, returns the request thread.
    """
    # Optional query arguments: snake_case keyword -> camelCase wire name.
    query_map = [
        ('count', 'count'),
        ('start_position', 'startPosition'),
        ('filter', 'filter'),
        ('sort', 'sort'),
    ]
    recognized = frozenset(
        ['room_id', 'account_id', 'callback', '_return_http_data_only',
         '_preload_content', '_request_timeout'] +
        [local for local, _ in query_map])
    params = {'room_id': room_id, 'account_id': account_id}
    for name, value in kwargs.items():
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s' to method get_room_users" % name)
        params[name] = value
    # Both path parameters are mandatory and may not be None.
    for required in ('room_id', 'account_id'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling `get_room_users`" % required)

    resource_path = '/v2/accounts/{accountId}/rooms/{roomId}/users'.replace('{format}', 'json')
    path_params = {'roomId': params['room_id'],
                   'accountId': params['account_id']}
    query_params = {wire: params[local]
                    for local, wire in query_map if local in params}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['text/plain', 'application/json', 'text/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json-patch+json', 'application/json',
             'text/json', 'application/*+json']),
    }
    return self.api_client.call_api(
        resource_path, 'GET', path_params, query_params, header_params,
        body=None,
        post_params=[],
        files={},
        response_type='RoomUsersResult',
        auth_settings=[],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_rooms(self, account_id, **kwargs):
    """
    Gets rooms available to the calling user.

    Synchronous by default; supply a `callback` keyword argument for an
    asynchronous request.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :param int count: Number of rooms to return. Defaults to the maximum which is 100.
    :param int start_position: Position of the first item in the total results. Defaults to 0.
    :param str room_status: Status of the rooms to return. Defaults to \"Active\".
    :param int office_id: Only return rooms in this office.
    :param str field_data_changed_start_date: Fields data changed start datetime in UTC. Valid formats: yyyy-mm-dd hh:mm:ss or yyyy/mm/dd hh:mm:ss -Time is optional and will default to 00:00:00.
    :param str field_data_changed_end_date: Fields-data changed end DateTime in UTC. Valid formats: yyyy-mm-dd hh:mm:ss or yyyy/mm/dd hh:mm:ss -Time is optional and will default to 00:00:00.
    :param str room_closed_start_date: Room closed start datetime in UTC. Valid formats: yyyy-mm-dd hh:mm:ss or yyyy/mm/dd hh:mm:ss -Time is optional and will default to 00:00:00.
    :param str room_closed_end_date: Room closed end datetime in UTC. Valid formats: yyyy-mm-dd hh:mm:ss or yyyy/mm/dd hh:mm:ss -Time is optional and will default to 00:00:00.
    :return: RoomSummaryList, or the request thread when called asynchronously.
    """
    # Only the deserialized payload is wanted; the helper returns either
    # the data or the request thread, which we pass straight through.
    kwargs['_return_http_data_only'] = True
    return self.get_rooms_with_http_info(account_id, **kwargs)
def get_rooms_with_http_info(self, account_id, **kwargs):
    """
    Gets rooms available to the calling user.

    Returns rooms available to the calling user, including the full HTTP
    response data. Synchronous by default; supply a `callback` keyword
    argument for an asynchronous request.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :param int count: Number of rooms to return. Defaults to the maximum which is 100.
    :param int start_position: Position of the first item in the total results. Defaults to 0.
    :param str room_status: Status of the rooms to return. Defaults to \"Active\".
    :param int office_id: Only return rooms in this office.
    :param str field_data_changed_start_date: Fields data changed start datetime in UTC. Valid formats: yyyy-mm-dd hh:mm:ss or yyyy/mm/dd hh:mm:ss -Time is optional and will default to 00:00:00.
    :param str field_data_changed_end_date: Fields-data changed end DateTime in UTC. Valid formats: yyyy-mm-dd hh:mm:ss or yyyy/mm/dd hh:mm:ss -Time is optional and will default to 00:00:00.
    :param str room_closed_start_date: Room closed start datetime in UTC. Valid formats: yyyy-mm-dd hh:mm:ss or yyyy/mm/dd hh:mm:ss -Time is optional and will default to 00:00:00.
    :param str room_closed_end_date: Room closed end datetime in UTC. Valid formats: yyyy-mm-dd hh:mm:ss or yyyy/mm/dd hh:mm:ss -Time is optional and will default to 00:00:00.
    :return: RoomSummaryList
        If the method is called asynchronously, returns the request thread.
    """
    # Optional query arguments: snake_case keyword -> camelCase wire name.
    query_map = [
        ('count', 'count'),
        ('start_position', 'startPosition'),
        ('room_status', 'roomStatus'),
        ('office_id', 'officeId'),
        ('field_data_changed_start_date', 'fieldDataChangedStartDate'),
        ('field_data_changed_end_date', 'fieldDataChangedEndDate'),
        ('room_closed_start_date', 'roomClosedStartDate'),
        ('room_closed_end_date', 'roomClosedEndDate'),
    ]
    recognized = frozenset(
        ['account_id', 'callback', '_return_http_data_only',
         '_preload_content', '_request_timeout'] +
        [local for local, _ in query_map])
    params = {'account_id': account_id}
    for name, value in kwargs.items():
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s' to method get_rooms" % name)
        params[name] = value
    # The account path parameter is mandatory and may not be None.
    if params.get('account_id') is None:
        raise ValueError("Missing the required parameter `account_id` when calling `get_rooms`")

    resource_path = '/v2/accounts/{accountId}/rooms'.replace('{format}', 'json')
    path_params = {'accountId': params['account_id']}
    query_params = {wire: params[local]
                    for local, wire in query_map if local in params}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['text/plain', 'application/json', 'text/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json-patch+json', 'application/json',
             'text/json', 'application/*+json']),
    }
    return self.api_client.call_api(
        resource_path, 'GET', path_params, query_params, header_params,
        body=None,
        post_params=[],
        files={},
        response_type='RoomSummaryList',
        auth_settings=[],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def invite_user(self, room_id, account_id, **kwargs):
    """
    Invites a user to the room by email address.

    Invites an existing or new member to a specific room. Synchronous by
    default; supply a `callback` keyword argument for an asynchronous
    request.

    :param callback function: The callback function for asynchronous request. (optional)
    :param int room_id: The id of the room. (required)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :param RoomInvite body:
    :return: RoomInviteResponse, or the request thread when called asynchronously.
    """
    # Only the deserialized payload is wanted; the helper returns either
    # the data or the request thread, which we pass straight through.
    kwargs['_return_http_data_only'] = True
    return self.invite_user_with_http_info(room_id, account_id, **kwargs)
def invite_user_with_http_info(self, room_id, account_id, **kwargs):
    """
    Invites a user to the room by email address.

    Invites an existing or new member to a specific room, returning the
    full HTTP response data. Synchronous by default; supply a `callback`
    keyword argument for an asynchronous request.

    :param callback function: The callback function for asynchronous request. (optional)
    :param int room_id: The id of the room. (required)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :param RoomInvite body:
    :return: RoomInviteResponse
        If the method is called asynchronously, returns the request thread.
    """
    recognized = frozenset([
        'room_id', 'account_id', 'body',
        'callback', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ])
    params = {'room_id': room_id, 'account_id': account_id}
    for name, value in kwargs.items():
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s' to method invite_user" % name)
        params[name] = value
    # Both path parameters are mandatory and may not be None.
    for required in ('room_id', 'account_id'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling `invite_user`" % required)

    resource_path = '/v2/accounts/{accountId}/rooms/{roomId}/users'.replace('{format}', 'json')
    path_params = {'roomId': params['room_id'],
                   'accountId': params['account_id']}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['text/plain', 'application/json', 'text/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json-patch+json', 'application/json',
             'text/json', 'application/*+json']),
    }
    return self.api_client.call_api(
        resource_path, 'POST', path_params, {}, header_params,
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='RoomInviteResponse',
        auth_settings=[],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def put_room_user(self, room_id, user_id, account_id, **kwargs):
    """
    Updates the specified user's role and transaction side.

    Synchronous by default; supply a `callback` keyword argument for an
    asynchronous request.

    :param callback function: The callback function for asynchronous request. (optional)
    :param int room_id: The id of the room. (required)
    :param int user_id: The id of the user to update. (required)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :param RoomUserForUpdate body:
    :return: RoomUser, or the request thread when called asynchronously.
    """
    # Only the deserialized payload is wanted; the helper returns either
    # the data or the request thread, which we pass straight through.
    kwargs['_return_http_data_only'] = True
    return self.put_room_user_with_http_info(room_id, user_id, account_id, **kwargs)
def put_room_user_with_http_info(self, room_id, user_id, account_id, **kwargs):
    """
    Updates the specified user's role and transaction side.

    Updates the specified user's role and transaction side, returning
    the full HTTP response data. Synchronous by default; supply a
    `callback` keyword argument for an asynchronous request.

    :param callback function: The callback function for asynchronous request. (optional)
    :param int room_id: The id of the room. (required)
    :param int user_id: The id of the user to update. (required)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :param RoomUserForUpdate body:
    :return: RoomUser
        If the method is called asynchronously, returns the request thread.
    """
    recognized = frozenset([
        'room_id', 'user_id', 'account_id', 'body',
        'callback', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ])
    params = {'room_id': room_id, 'user_id': user_id,
              'account_id': account_id}
    for name, value in kwargs.items():
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s' to method put_room_user" % name)
        params[name] = value
    # All three path parameters are mandatory and may not be None.
    for required in ('room_id', 'user_id', 'account_id'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling `put_room_user`" % required)

    resource_path = '/v2/accounts/{accountId}/rooms/{roomId}/users/{userId}'.replace('{format}', 'json')
    path_params = {'roomId': params['room_id'],
                   'userId': params['user_id'],
                   'accountId': params['account_id']}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['text/plain', 'application/json', 'text/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json-patch+json', 'application/json',
             'text/json', 'application/*+json']),
    }
    return self.api_client.call_api(
        resource_path, 'PUT', path_params, {}, header_params,
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='RoomUser',
        auth_settings=[],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def restore_room_user_access(self, room_id, user_id, account_id, **kwargs):
    """
    Restores the specified user's access to the room.

    Synchronous by default; pass a `callback` keyword (a function taking
    the response) to make the request asynchronous, in which case the
    request thread is returned instead of the response data.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.restore_room_user_access(room_id, user_id, account_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int room_id: The room Id to restore access (required)
    :param int user_id: The user Id getting restored to the room (required)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of the plain method want only the payload, never the full
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous path: hand back the request thread immediately.
        return self.restore_room_user_access_with_http_info(room_id, user_id, account_id, **kwargs)
    # Synchronous path: perform the request and return the response data.
    return self.restore_room_user_access_with_http_info(room_id, user_id, account_id, **kwargs)
def restore_room_user_access_with_http_info(self, room_id, user_id, account_id, **kwargs):
    """
    Restores the specified user's access to the room.
    Restores the specified user's access to the room.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.restore_room_user_access_with_http_info(room_id, user_id, account_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int room_id: The room Id to restore access (required)
    :param int user_id: The user Id getting restored to the room (required)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of accepted arguments: the required path parameters plus
    # the standard per-request options.
    all_params = ['room_id', 'user_id', 'account_id']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the local namespace, then merge validated kwargs into it so
    # every parameter can be looked up uniformly by name below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method restore_room_user_access" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'room_id' is set
    if ('room_id' not in params) or (params['room_id'] is None):
        raise ValueError("Missing the required parameter `room_id` when calling `restore_room_user_access`")
    # verify the required parameter 'user_id' is set
    if ('user_id' not in params) or (params['user_id'] is None):
        raise ValueError("Missing the required parameter `user_id` when calling `restore_room_user_access`")
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `restore_room_user_access`")
    collection_formats = {}
    # Endpoint template; '{format}' is a generator artifact, always json.
    resource_path = '/v2/accounts/{accountId}/rooms/{roomId}/users/{userId}/restore_access'.replace('{format}', 'json')
    path_params = {}
    if 'room_id' in params:
        path_params['roomId'] = params['room_id']
    if 'user_id' in params:
        path_params['userId'] = params['user_id']
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['text/plain', 'application/json', 'text/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json-patch+json', 'application/json', 'text/json', 'application/*+json'])
    # Authentication setting
    auth_settings = []
    # POST with no body; response_type=None means the response is not
    # deserialized into a model object.
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def revoke_room_user_access(self, room_id, user_id, account_id, **kwargs):
    """
    Revokes the specified user's access to the room.

    Synchronous by default; pass a `callback` keyword (a function taking
    the response) to make the request asynchronous, in which case the
    request thread is returned instead of the response data.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.revoke_room_user_access(room_id, user_id, account_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int room_id: The room Id to revoke access from (required)
    :param int user_id: The user Id getting revoked from the room (required)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :param RoomUserRemovalDetail body:
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Return only the payload, never the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous path: hand back the request thread immediately.
        return self.revoke_room_user_access_with_http_info(room_id, user_id, account_id, **kwargs)
    # Synchronous path: perform the request and return the response data.
    return self.revoke_room_user_access_with_http_info(room_id, user_id, account_id, **kwargs)
def revoke_room_user_access_with_http_info(self, room_id, user_id, account_id, **kwargs):
    """
    Revokes the specified user's access to the room.
    Revokes the specified user's access to the room.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.revoke_room_user_access_with_http_info(room_id, user_id, account_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int room_id: The room Id to revoke access from (required)
    :param int user_id: The user Id getting revoked from the room (required)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :param RoomUserRemovalDetail body:
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of accepted arguments: required path parameters, the
    # optional request body, and the standard per-request options.
    all_params = ['room_id', 'user_id', 'account_id', 'body']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the local namespace, then merge validated kwargs into it so
    # every parameter can be looked up uniformly by name below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method revoke_room_user_access" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'room_id' is set
    if ('room_id' not in params) or (params['room_id'] is None):
        raise ValueError("Missing the required parameter `room_id` when calling `revoke_room_user_access`")
    # verify the required parameter 'user_id' is set
    if ('user_id' not in params) or (params['user_id'] is None):
        raise ValueError("Missing the required parameter `user_id` when calling `revoke_room_user_access`")
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `revoke_room_user_access`")
    collection_formats = {}
    # Endpoint template; '{format}' is a generator artifact, always json.
    resource_path = '/v2/accounts/{accountId}/rooms/{roomId}/users/{userId}/revoke_access'.replace('{format}', 'json')
    path_params = {}
    if 'room_id' in params:
        path_params['roomId'] = params['room_id']
    if 'user_id' in params:
        path_params['userId'] = params['user_id']
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Optional JSON request body (RoomUserRemovalDetail).
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['text/plain', 'application/json', 'text/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json-patch+json', 'application/json', 'text/json', 'application/*+json'])
    # Authentication setting
    auth_settings = []
    # POST; response_type=None means the response is not deserialized.
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def update_picture(self, room_id, account_id, **kwargs):
    """
    Update the picture for a room.

    The endpoint accepts either a JSON body (fileName / Base64Contents)
    or multipart/streamed binary content; this client sends the optional
    `file` keyword as multipart form data.

    Synchronous by default; pass a `callback` keyword (a function taking
    the response) to make the request asynchronous, in which case the
    request thread is returned instead of the response data.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_picture(room_id, account_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int room_id: ID of the room the picture is for. (required)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :param file file:
    :return: RoomPicture
        If the method is called asynchronously,
        returns the request thread.
    """
    # Return only the payload, never the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous path: hand back the request thread immediately.
        return self.update_picture_with_http_info(room_id, account_id, **kwargs)
    # Synchronous path: perform the request and return the RoomPicture.
    return self.update_picture_with_http_info(room_id, account_id, **kwargs)
def update_picture_with_http_info(self, room_id, account_id, **kwargs):
    """
    Update the picture for a room.
    This endpoint supports the following content types, application/json as JSON PictureForUpdate{\"fileName\":\"string\", \"Base64Contents\":\"string\"}, multipart/formdata and any other streamed binary content type (as long as either query parameter fileName or request header ContentDisposition filename is included).
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.update_picture_with_http_info(room_id, account_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int room_id: ID of the room the picture is for. (required)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :param file file:
    :return: RoomPicture
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of accepted arguments: required path parameters, the
    # optional multipart file, and the standard per-request options.
    all_params = ['room_id', 'account_id', 'file']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the local namespace, then merge validated kwargs into it so
    # every parameter can be looked up uniformly by name below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_picture" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'room_id' is set
    if ('room_id' not in params) or (params['room_id'] is None):
        raise ValueError("Missing the required parameter `room_id` when calling `update_picture`")
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `update_picture`")
    collection_formats = {}
    # Endpoint template; '{format}' is a generator artifact, always json.
    resource_path = '/v2/accounts/{accountId}/rooms/{roomId}/picture'.replace('{format}', 'json')
    path_params = {}
    if 'room_id' in params:
        path_params['roomId'] = params['room_id']
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # The optional picture is sent as a multipart file part, not a body.
    if 'file' in params:
        local_var_files['file'] = params['file']
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['multipart/form-data'])
    # Authentication setting
    auth_settings = []
    # PUT; the JSON response is deserialized into a RoomPicture model.
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='RoomPicture',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def update_room_field_data(self, room_id, account_id, **kwargs):
    """
    Updates room field data.

    Synchronous by default; pass a `callback` keyword (a function taking
    the response) to make the request asynchronous, in which case the
    request thread is returned instead of the response data.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_room_field_data(room_id, account_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int room_id: The id of the room. (required)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :param FieldDataForUpdate body:
    :return: FieldData
        If the method is called asynchronously,
        returns the request thread.
    """
    # Return only the payload, never the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous path: hand back the request thread immediately.
        return self.update_room_field_data_with_http_info(room_id, account_id, **kwargs)
    # Synchronous path: perform the request and return the FieldData.
    return self.update_room_field_data_with_http_info(room_id, account_id, **kwargs)
def update_room_field_data_with_http_info(self, room_id, account_id, **kwargs):
    """
    Updates room field data.
    Updates room field data.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.update_room_field_data_with_http_info(room_id, account_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int room_id: The id of the room. (required)
    :param str account_id: (Required) The globally unique identifier (GUID) for the account. (required)
    :param FieldDataForUpdate body:
    :return: FieldData
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of accepted arguments: required path parameters, the
    # optional request body, and the standard per-request options.
    all_params = ['room_id', 'account_id', 'body']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the local namespace, then merge validated kwargs into it so
    # every parameter can be looked up uniformly by name below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_room_field_data" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'room_id' is set
    if ('room_id' not in params) or (params['room_id'] is None):
        raise ValueError("Missing the required parameter `room_id` when calling `update_room_field_data`")
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `update_room_field_data`")
    collection_formats = {}
    # Endpoint template; '{format}' is a generator artifact, always json.
    resource_path = '/v2/accounts/{accountId}/rooms/{roomId}/field_data'.replace('{format}', 'json')
    path_params = {}
    if 'room_id' in params:
        path_params['roomId'] = params['room_id']
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Optional JSON request body (FieldDataForUpdate).
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['text/plain', 'application/json', 'text/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json-patch+json', 'application/json', 'text/json', 'application/*+json'])
    # Authentication setting
    auth_settings = []
    # PUT; the JSON response is deserialized into a FieldData model.
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='FieldData',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
| 48.06236
| 324
| 0.59529
| 12,037
| 107,131
| 5.073773
| 0.030739
| 0.039788
| 0.020173
| 0.02235
| 0.972803
| 0.964731
| 0.961276
| 0.95841
| 0.957772
| 0.950928
| 0
| 0.001537
| 0.319898
| 107,131
| 2,228
| 325
| 48.083932
| 0.836687
| 0.35802
| 0
| 0.80991
| 0
| 0
| 0.215755
| 0.054499
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033333
| false
| 0
| 0.006306
| 0
| 0.089189
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
36b708c257d90e7320ee9b1d82e8fd2584c787e3
| 149
|
py
|
Python
|
comentarios.py
|
lordjack/oficina_introducao_programacao_python
|
d432e3d39cfb4790da5f3e75c2bf3ee86fa29f8e
|
[
"MIT"
] | null | null | null |
comentarios.py
|
lordjack/oficina_introducao_programacao_python
|
d432e3d39cfb4790da5f3e75c2bf3ee86fa29f8e
|
[
"MIT"
] | null | null | null |
comentarios.py
|
lordjack/oficina_introducao_programacao_python
|
d432e3d39cfb4790da5f3e75c2bf3ee86fa29f8e
|
[
"MIT"
] | null | null | null |
# This is a comment
# NOTE: the two bare triple-quoted strings below are no-op expression
# statements, commonly used in Python as multi-line "comments"; their
# Portuguese content ("this is a multi-line comment, 3 single/double
# quotes") is kept verbatim because string literals are runtime code.
'''
Isso é um comentário de
várias linhas
3 aspas simples
'''
"""
Isso é um comentário de
várias linhas
3 aspas dupla
"""
| 10.642857
| 24
| 0.691275
| 24
| 149
| 4.291667
| 0.458333
| 0.145631
| 0.203884
| 0.495146
| 0.718447
| 0.718447
| 0.718447
| 0.718447
| 0.718447
| 0
| 0
| 0.017241
| 0.221477
| 149
| 13
| 25
| 11.461538
| 0.87069
| 0.510067
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
36c7b1c2c39da40a7aa82b3a1cf80937f4f94e42
| 2,963
|
py
|
Python
|
DistributeBTC/BIP44Tools.py
|
WouterGlorieux/BitcoinSpellbook-v0.1
|
982428470ba977182b2519ca8ab15fe8f94ee607
|
[
"MIT"
] | 1
|
2016-05-03T08:04:55.000Z
|
2016-05-03T08:04:55.000Z
|
DistributeBTC/BIP44Tools.py
|
ValyrianTech/BitcoinSpellbook-v0.1
|
982428470ba977182b2519ca8ab15fe8f94ee607
|
[
"MIT"
] | null | null | null |
DistributeBTC/BIP44Tools.py
|
ValyrianTech/BitcoinSpellbook-v0.1
|
982428470ba977182b2519ca8ab15fe8f94ee607
|
[
"MIT"
] | null | null | null |
import bitcoin
from mnemonic import *
from binascii import hexlify, unhexlify
import json
from pprint import pprint
HARDENED = 2**31
def getAddressesFromXPUB(xpub, i=10):
    """Derive the first *i* external (receive) addresses from a BIP32 xpub.

    :param xpub: serialized BIP32 extended public key
    :param i: number of addresses to derive (default 10)
    :return: list of base58 P2PKH address strings
    """
    addressList = []
    # External (receive) chain is child 0: .../0/index
    pub0 = bitcoin.bip32_ckd(xpub, 0)
    # Fix: use a distinct loop variable; the original reused the
    # parameter name `i` as the loop index, shadowing it.
    for index in range(0, i):
        publicKey = bitcoin.bip32_ckd(pub0, index)
        hexKey = bitcoin.encode_pubkey(bitcoin.bip32_extract_key(publicKey), 'hex_compressed')
        addressList.append(bitcoin.pubtoaddr(hexKey))
    return addressList
def getPrivKey(xpriv, i):
    """Return a one-entry {address: WIF key} dict for external child *i*.

    :param xpriv: serialized BIP32 extended private key
    :param i: child index on the external (0) chain
    :return: dict mapping the derived address to its compressed WIF key
    """
    # External chain (child 0), then the requested index.
    chain0 = bitcoin.bip32_ckd(xpriv, 0)
    child = bitcoin.bip32_ckd(chain0, i)
    wifKey = bitcoin.encode_privkey(bitcoin.bip32_extract_key(child), 'wif_compressed')
    return {bitcoin.privtoaddr(wifKey): wifKey}
def getPrivKeys(xpriv, i=10):
    """Derive {address: WIF key} for the first *i* external-chain children.

    :param xpriv: serialized BIP32 extended private key
    :param i: number of keys to derive (default 10)
    :return: dict mapping each derived address to its compressed WIF key
    """
    privkeys = {}
    # External (receive) chain is child 0: .../0/index
    priv0 = bitcoin.bip32_ckd(xpriv, 0)
    # Fix: use a distinct loop variable; the original reused the
    # parameter name `i` as the loop index, shadowing it.
    for index in range(0, i):
        privateKey = bitcoin.bip32_ckd(priv0, index)
        wifKey = bitcoin.encode_privkey(bitcoin.bip32_extract_key(privateKey), 'wif_compressed')
        privkeys[bitcoin.privtoaddr(wifKey)] = wifKey
    return privkeys
def getChangeAddressesFromXPUB(xpub, i=10):
    """Derive the first *i* change addresses from a BIP32 xpub.

    :param xpub: serialized BIP32 extended public key
    :param i: number of addresses to derive (default 10)
    :return: list of base58 P2PKH address strings
    """
    addressList = []
    # Internal (change) chain is child 1: .../1/index
    pub1 = bitcoin.bip32_ckd(xpub, 1)
    # Fix: use a distinct loop variable; the original reused the
    # parameter name `i` as the loop index, shadowing it.
    for index in range(0, i):
        publicKey = bitcoin.bip32_ckd(pub1, index)
        hexKey = bitcoin.encode_pubkey(bitcoin.bip32_extract_key(publicKey), 'hex_compressed')
        addressList.append(bitcoin.pubtoaddr(hexKey))
    return addressList
def getTrezorXPUBKeys(mnemonic, passphrase="", i=1):
myMnemonic = mnemonic
passphrase = passphrase
mnemo = Mnemonic('english')
seed = hexlify(mnemo.to_seed(myMnemonic, passphrase=passphrase))
priv = bitcoin.bip32_master_key(unhexlify(seed))
account = 0
derivedPrivateKey = bitcoin.bip32_ckd(bitcoin.bip32_ckd(bitcoin.bip32_ckd(priv, 44+HARDENED), HARDENED), HARDENED+account)
xpubs = []
for i in range(0, i):
derivedPrivateKey = bitcoin.bip32_ckd(bitcoin.bip32_ckd(bitcoin.bip32_ckd(priv, 44+HARDENED), HARDENED), HARDENED+i)
xpub = bitcoin.bip32_privtopub(derivedPrivateKey)
xpubs.append(xpub)
return xpubs
def getTrezorXPRIVKeys(mnemonic, passphrase="", i=1):
myMnemonic = mnemonic
passphrase = passphrase
mnemo = Mnemonic('english')
seed = hexlify(mnemo.to_seed(myMnemonic, passphrase=passphrase))
priv = bitcoin.bip32_master_key(unhexlify(seed))
account = 0
derivedPrivateKey = bitcoin.bip32_ckd(bitcoin.bip32_ckd(bitcoin.bip32_ckd(priv, 44+HARDENED), HARDENED), HARDENED+account)
xprivs = []
for i in range(0, i):
derivedPrivateKey = bitcoin.bip32_ckd(bitcoin.bip32_ckd(bitcoin.bip32_ckd(priv, 44+HARDENED), HARDENED), HARDENED+i)
xprivs.append(derivedPrivateKey)
return xprivs
| 28.219048
| 126
| 0.707391
| 349
| 2,963
| 5.859599
| 0.191977
| 0.158435
| 0.146699
| 0.086064
| 0.828362
| 0.828362
| 0.828362
| 0.788264
| 0.788264
| 0.748166
| 0
| 0.038365
| 0.190685
| 2,963
| 104
| 127
| 28.490385
| 0.814429
| 0
| 0
| 0.691176
| 0
| 0
| 0.023641
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.088235
| false
| 0.088235
| 0.073529
| 0
| 0.25
| 0.014706
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
36d23a42da6b548883c4fcab93d162de379166b1
| 7,422
|
py
|
Python
|
storm_analysis/test/test_fitz_c.py
|
bintulab/storm-analysis
|
71ae493cbd17ddb97938d0ae2032d97a0eaa76b2
|
[
"CNRI-Python"
] | null | null | null |
storm_analysis/test/test_fitz_c.py
|
bintulab/storm-analysis
|
71ae493cbd17ddb97938d0ae2032d97a0eaa76b2
|
[
"CNRI-Python"
] | null | null | null |
storm_analysis/test/test_fitz_c.py
|
bintulab/storm-analysis
|
71ae493cbd17ddb97938d0ae2032d97a0eaa76b2
|
[
"CNRI-Python"
] | 1
|
2021-04-19T18:17:06.000Z
|
2021-04-19T18:17:06.000Z
|
#!/usr/bin/env python
"""
Tests of fitz_c fitting of Z values. This is used by
3D-DAOSTORM / sCMOS when using the '3d' model.
"""
import numpy
import storm_analysis
import storm_analysis.sa_library.parameters as params
import storm_analysis.sa_library.sa_h5py as saH5Py
import storm_analysis.sa_utilities.fitz_c as fitzC
def test_fitz_c_1():
    """
    Test setting z values for raw localizations.
    """
    # Pull the '3d' model calibration out of the test settings file.
    cfg = params.ParametersDAO().initFromFile(storm_analysis.getData("test/data/test_3d_3d.xml"))
    wx_params, wy_params = cfg.getWidthParams()
    min_z, max_z = cfg.getZRange()
    pixel_size = cfg.getAttr("pixel_size")

    # Widths that lie exactly on the calibration curve at known z values.
    z_vals = numpy.arange(-250.0, 251.0, 50)
    sx, sy = fitzC.calcSxSy(wx_params, wy_params, z_vals)

    # Write localizations with those widths into a fresh HDF5 file.
    locs_in = {"x" : numpy.zeros(sx.size),
               "xsigma" : sx/pixel_size,
               "ysigma" : sy/pixel_size}
    h5_name = storm_analysis.getPathOutputTest("test_sa_hdf5.hdf5")
    storm_analysis.removeFile(h5_name)
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.setMovieInformation(256, 256, 10, "XYZZY")
        h5.setPixelSize(pixel_size)
        h5.addLocalizations(locs_in, 1)

    # Fit z, then verify every localization recovered its true z (microns).
    fitzC.fitzRaw(h5_name, 1.5, wx_params, wy_params, min_z, max_z, 1.0e-3)
    with saH5Py.SAH5Py(h5_name) as h5:
        fitted = h5.getLocalizationsInFrame(1)
        assert(numpy.allclose(fitted["z"], z_vals*1.0e-3))
def test_fitz_c_2():
    """
    Test that localizations with wx, wy values that are not near
    the calibration curve are assigned z values less than z minimum.
    """
    # Pull the '3d' model calibration out of the test settings file.
    cfg = params.ParametersDAO().initFromFile(storm_analysis.getData("test/data/test_3d_3d.xml"))
    wx_params, wy_params = cfg.getWidthParams()
    min_z, max_z = cfg.getZRange()
    pixel_size = cfg.getAttr("pixel_size")

    # On-curve widths, then push them off the curve by one pixel each.
    z_vals = numpy.arange(-250.0, 251.0, 100)
    sx, sy = fitzC.calcSxSy(wx_params, wy_params, z_vals)
    locs_in = {"x" : numpy.zeros(sx.size),
               "xsigma" : sx/pixel_size + numpy.ones(sx.size),
               "ysigma" : sy/pixel_size + numpy.ones(sx.size)}

    # Write the off-curve localizations into a fresh HDF5 file.
    h5_name = storm_analysis.getPathOutputTest("test_sa_hdf5.hdf5")
    storm_analysis.removeFile(h5_name)
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.setMovieInformation(256, 256, 10, "XYZZY")
        h5.setPixelSize(pixel_size)
        h5.addLocalizations(locs_in, 1)

    # Fit z; off-curve localizations should land just below z minimum.
    fitzC.fitzRaw(h5_name, 1.5, wx_params, wy_params, min_z, max_z, 1.0e-3)
    with saH5Py.SAH5Py(h5_name) as h5:
        fitted = h5.getLocalizationsInFrame(1)
        assert(numpy.allclose(fitted["z"], min_z*numpy.ones(sx.size)-1.0e-3))
def test_fitz_c_3():
    """
    Test setting z values for tracked localizations.
    """
    # Pull the '3d' model calibration out of the test settings file.
    cfg = params.ParametersDAO().initFromFile(storm_analysis.getData("test/data/test_3d_3d.xml"))
    wx_params, wy_params = cfg.getWidthParams()
    min_z, max_z = cfg.getZRange()
    pixel_size = cfg.getAttr("pixel_size")

    # On-curve widths at known z values.
    z_vals = numpy.arange(-250.0, 251.0, 50)
    sx, sy = fitzC.calcSxSy(wx_params, wy_params, z_vals)

    # Tracks store summed sigmas, so scale by the track length; the first
    # two tracks have length 2 to exercise the averaging.
    track_length = numpy.ones(sx.size)
    track_length[:2] = 2
    tracks_in = {"category" : numpy.ones(sx.size, dtype = numpy.int32),
                 "track_length" : track_length,
                 "x" : numpy.zeros(sx.size),
                 "xsigma" : track_length*sx/pixel_size,
                 "ysigma" : track_length*sy/pixel_size}
    h5_name = storm_analysis.getPathOutputTest("test_sa_hdf5.hdf5")
    storm_analysis.removeFile(h5_name)
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.setMovieInformation(256, 256, 10, "XYZZY")
        h5.setPixelSize(pixel_size)
        h5.addTracks(tracks_in)

    # Fit z; all tracks recover their true z and keep their category.
    fitzC.fitzTracks(h5_name, 1.5, wx_params, wy_params, min_z, max_z, 1.0e-3)
    with saH5Py.SAH5Py(h5_name) as h5:
        for tracks in h5.tracksIterator():
            assert(numpy.allclose(tracks["z"], z_vals*1.0e-3))
            assert(numpy.allclose(tracks["category"], numpy.ones(sx.size)))
def test_fitz_c_4():
    """
    Test that tracks with wx, wy values that are not near the calibration
    curve are assigned z values less than z minimum.
    Their category remains unchanged as this is done in a separate step.
    """
    # Pull the '3d' model calibration out of the test settings file.
    cfg = params.ParametersDAO().initFromFile(storm_analysis.getData("test/data/test_3d_3d.xml"))
    wx_params, wy_params = cfg.getWidthParams()
    min_z, max_z = cfg.getZRange()
    pixel_size = cfg.getAttr("pixel_size")

    # On-curve widths, pushed off the curve by one pixel each.
    z_vals = numpy.arange(-250.0, 251.0, 50)
    sx, sy = fitzC.calcSxSy(wx_params, wy_params, z_vals)

    # Tracks store summed sigmas, so scale by the track length; the first
    # two tracks have length 2 to exercise the averaging.
    track_length = numpy.ones(sx.size)
    track_length[:2] = 2
    tracks_in = {"category" : numpy.ones(sx.size, dtype = numpy.int32),
                 "track_length" : track_length,
                 "x" : numpy.zeros(sx.size),
                 "xsigma" : track_length*(sx/pixel_size + numpy.ones(sx.size)),
                 "ysigma" : track_length*(sy/pixel_size + numpy.ones(sx.size))}
    h5_name = storm_analysis.getPathOutputTest("test_sa_hdf5.hdf5")
    storm_analysis.removeFile(h5_name)
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.setMovieInformation(256, 256, 10, "XYZZY")
        h5.setPixelSize(pixel_size)
        h5.addTracks(tracks_in)

    # Fit z; off-curve tracks land just below z minimum, categories intact.
    fitzC.fitzTracks(h5_name, 1.5, wx_params, wy_params, min_z, max_z, 1.0e-3)
    with saH5Py.SAH5Py(h5_name) as h5:
        for tracks in h5.tracksIterator():
            assert(numpy.allclose(tracks["z"], min_z*numpy.ones(sx.size)-1.0e-3))
            assert(numpy.allclose(tracks["category"], numpy.ones(sx.size)))
def test_fitz_c_5():
    """
    Test that fitz_c.wXwYCurveDistance works correctly.
    """
    # Load 3D fitting parameters from the test settings file.
    settings = storm_analysis.getData("test/data/test_3d_3d.xml")
    parameters = params.ParametersDAO().initFromFile(settings)
    [wx_params, wy_params] = parameters.getWidthParams()
    [min_z, max_z] = parameters.getZRange()
    pixel_size = parameters.getAttr("pixel_size")

    # Widths exactly on the calibration curve.
    z_vals = numpy.arange(-250.0, 251.0, 50)
    [sx, sy] = fitzC.calcSxSy(wx_params, wy_params, z_vals)

    # On-curve widths should give distances very close to zero.
    distances = fitzC.wXwYCurveDistance(wx_params, wy_params, 2.0*sx, 2.0*sy, min_z, max_z, 0.001)
    assert numpy.allclose(distances, numpy.zeros(sx.size))

    # Perturb the first width; only that distance should become larger.
    sx[0] += 10.0
    distances = fitzC.wXwYCurveDistance(wx_params, wy_params, 2.0*sx, 2.0*sy, min_z, max_z, 0.001)
    exp_dist = numpy.zeros(sx.size)
    exp_dist[0] = 0.0345862
    assert numpy.allclose(distances, exp_dist)
if (__name__ == "__main__"):
    # Run all of the fitz_c tests in order when invoked as a script.
    for test_fn in (test_fitz_c_1,
                    test_fitz_c_2,
                    test_fitz_c_3,
                    test_fitz_c_4,
                    test_fitz_c_5):
        test_fn()
| 34.045872
| 93
| 0.665993
| 1,043
| 7,422
| 4.541707
| 0.144775
| 0.041799
| 0.033777
| 0.054043
| 0.888326
| 0.852649
| 0.850538
| 0.838716
| 0.82985
| 0.82985
| 0
| 0.043864
| 0.21059
| 7,422
| 217
| 94
| 34.202765
| 0.764636
| 0.152789
| 0
| 0.716667
| 0
| 0
| 0.061274
| 0.019452
| 0
| 0
| 0
| 0
| 0.066667
| 1
| 0.041667
| false
| 0
| 0.041667
| 0
| 0.083333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7fc10ed543a17978cbdea26b15a60be0b32b901e
| 62,100
|
py
|
Python
|
tests/loads-tests/image_base64.py
|
ShawnZhang31/facepipe
|
5f08a121a27674445e5d2b2833eb5aeafc1726aa
|
[
"MIT"
] | 1
|
2021-06-12T12:53:33.000Z
|
2021-06-12T12:53:33.000Z
|
tests/loads-tests/image_base64.py
|
ShawnZhang31/facepipe
|
5f08a121a27674445e5d2b2833eb5aeafc1726aa
|
[
"MIT"
] | null | null | null |
tests/loads-tests/image_base64.py
|
ShawnZhang31/facepipe
|
5f08a121a27674445e5d2b2833eb5aeafc1726aa
|
[
"MIT"
] | null | null | null |
img_base64="data:image/jpeg;base64,/9j/4AAQSkZJRgABAQEAYABgAAD/2wBDAAgGBgcGBQgHBwcJCQgKDBQNDAsLDBkSEw8UHRofHh0aHBwgJC4nICIsIxwcKDcpLDAxNDQ0Hyc5PTgyPC4zNDL/2wBDAQkJCQwLDBgNDRgyIRwhMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjL/wAARCAOxAooDASIAAhEBAxEB/8QAHAABAAIDAQEBAAAAAAAAAAAAAAMEAQIFBgcI/8QAQxAAAgIBAgQFAgQDBwMEAQMFAQIAAxEEIQUSMUEGEyJRYTJxFIGRoSNCsRUzUnLB0eEHYvAkNDVD8RYlc1OCkrLC/8QAGQEBAQEBAQEAAAAAAAAAAAAAAAECAwQF/8QAJREBAQACAgMAAgIDAQEAAAAAAAECESExAxJBIlETMgRhcUKR/9oADAMBAAIRAxEAPwD4BERNoREQEREBERL8CIiQIiJdBERARESBERAREQEREaCIiWBERAREQEREmgiIjQRERoIiJdBERGgiIk0EREaCJnES6GIiI0GIxMmYjQYjERGgxGIiNBiMREaDERBjQRERoIiI0EREmgiIjQRERoIiI0EREaCIiNBERKEREBERAREQEREgREQEREBERAREShERAREQEREBERAREQEREBERAREQEREBERAREzAxERAREQERMwMRM8skSh36A494XSPEYl1NBn6m/QSwNC4GVrIHu8m19a5fIT2jkx3E7CcOtdclAB2ImLOGOhweneNnq5QqYjbH6zABOVx9jOieH2jdQCc9pC9BTPMpUH37RtNKRExLFtZGGxkGRFB2Mo1PQTE2I7fE1hCJnExAREkrrDn1OFXuTA0AJOB1mxXkOCd+49puWUemrIHdj1P+0jO/SFYJmIiEIiICIiAiIgIiICIiAiIgIiICIiIEREBERAREQEREUIiJAiIgIiICIiXoIiICIiAiIgIiI0EREBERAREQEREBERAREQEREBERAREQEzMhcjPaZVc7D8yYGACTgbmWK9JzDmdwq/qZJTQqjmY59hLlFXMec7DsP+JLWpENWmrBHIhb/uaXa9KCueTMyrZblXmdu4HaXK6HCKXtRM78gPMQPkD/AFmbXSRvp9EhI5lCD2xkmW/w1SnmCnPT1Cb06iqlGHQdiVwTNhqtNY4elbOYbHnckfkJeCxHZSRgMAR25hMV6YWVs38L2wdjJbNWTaST6zthR1H2kZ1ddpIYIp7Apg/rCbVbNCF9QYDHzsZA2jdtm35umZa8xfMK43B2y0yupBtAxk9xA5R4ed0x84M52p0T0vgrsek9QOW8EbZJ7nBEivpV6eVhzEd89IZseSar8vvNGrIbHb3nU1dBpsYgAD7Sr5RsbLNt9us1tlVK7f6mS1aOy4gVVu5+FnRr0lFQD3epxuF7D7y2NScAebyD2TaNrpyf7K1PdOXH+IyOzR3JgcjGd6pK7HwbMZHcy2NBRYMLaH/yttG19XjyGUcvJg99poQRPX28KG4C5WcfVcHaslk//wAY2nrXHiSWVshwykGRwyREQEREBERAREQEREBERGgiIgIiICIiAiIgIiI0ERElCIiXYRESBERAREShERAREQEREBERAREQEREBERAREQEREBERAREQEREDOJiZmVUscCBlFLnHbvLCJnYbAQtfL6F3bv8AEnqKpgL6m/YTLUieqhmJZVyQMsewlsWafTY5CLm75yF+3uf2lNne0cinm+BsB8yTmWnbIZu+P9JHSLLamy1yxwqgYwoC7fl0mo1h8nyqhyJtkqf9ZSZjYcudh0HaZ6jIAwB3P+kmtrt0KBpuQNYj3PnoLD0+ZbpTTc51C1JSg9gWP7zjKtzjmwxUe3SZqpLtsCPnJMqO69umcei+xyd85IxNWVrqgAhKgY64OJBVpQFDVW1BgMYZ8H9DLCXWKPLuw
QPbB/SVFe+s1jdgVPQd/wA5Cz4UMp9Q/eXNUDZZ5ddyuRuA5/3nK1BZW9QIPse0JYmTWqd84YGbLr+YkfOfznKtPO2U2bvIy5G2fV0zCOlfctpHTmGxPvKgcBtuvvK2ST3MkA2A369oEnmnoenf5m3OpCkqJER1GYOVUZ3EgkZgTnBxNkt5WyCR9jIkcY6/kZuACMjYwrp6fimopHZ19iJdTX6fVHFgIYzz6Mw6EyetlfABw0y1HR1nDUtQ2VkY7Tzuo0xrcjGDO9RqXr2c8yzfUaSrWVE1gB5qZFx28oQQcGYlrU0PS5VwdpWIx9puOVjEREIREQEREBERAREQEREBERAREQEREBERAREQEREgRERsIiICIiUIiICIiAiIgIiICIiAiIgIiICIiAiIgJkDMxNkODA1iIgZmIm9dbWOFUZJgYVSx+B1lhUKjPT+s25Vq9A9TDqe01zn1GZakbDfYAgSemsEYJwO5+JrXUxGcbdzLKIqr6t/YdpGpGOTm9NS4TP1dMyUaM+WXdlxn/F1/KV7L+U8rEk9lBmllrj2T77mGt6WvJTJGRj5gitCcttKItZmADHPvNwoz9TNGiV0qQhPofA75O06emRcj+IhQdlfAnCqTlOysPgS5TpC/q5QvyTiRdu7bp2eoMunVlG/Mvv95TKUmw8/NS2Ngd1JmKFuQALa5C/HSdKtWtTkudLFbrzjBk9l9XD1NQByGZXHfqDKtilxyum56MJ6x+FItBZV5qx0AsGQfeUH4fsvJkjrhu8vtE9XnV0hYjCnPaWbtDWlKMASz9ABuZ6QcLqrrNisoDLtn+Q95BqaBRpeYIWcjAJ2AMnsXF5GysVtysMGa8oP0kHvt2l3U6c87HlJbqWzKhRkOSOXMrOmuA/Xr7GZ8px3yPabLg4ycHtNmRkyOueki6QYXoRgzYHl6yQKtygdHH7yPlKnDdBBpIpHcfYzflAO4B+ZFyGv1DdZNUVYcpPKT0Pt94aiet+ZfXvj4m9V/kvlc8n9JrURXZykYPdT0Mluq8teYDKE7fHxCpdVRXraOZSvPPN30GiwpYMTtJYUOVPpzvJNXpV12m5hjmUdZrGsZY76eZIwff5mJNZWyEo2zLIZtxIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiTYREQEREBERKEREBERAREQEREBERAREQEREBERAREQE2TqftNZkQMTMxN1XJ379oGaqWtbCgYHUnoJYPLWvJUTv9Te/wBvia83KvIv54mnVpGtM9SAJYor526ekdT7CaVVGwhVG5nRopB2TdR+5kWRg4J5jnA+kSCwvY+F6Z6y4614J5sds+/2kIYklVTpsAJG1fkNYIVlUnqc7wNMHPpbm+Z1NLobXIJr/Ted7ScDdwOarP3ElykWYWvNV8MblHMuM77S7VwtRg+WSfcie20XAKgckEflmdzT8BoRQ2Mtn2nLLyx2x8LwVHDPMAJA/IdJ0qeC9CRme4r4RSNwmG7kCXauH0KoHIf0nK+Sus8UjwScHZTms5J95IeGuVHOmG98z6AvD6SNkUfcSC3hlbHHKCPgSe9b9MXik4fcACqq5HUHvMDQKSQK2Rt/ynsv7Jq6BSPtNW4eFz8/H+k1M2L448fbpyKDVYMEjA+ZyL9Jazq6jO37z3Ws4XzIML06YnC1ejsrLKa8/MsyY/jeUu0FjIM5AznHvOXdo/Sc7dsT1F6NQp5gCM5AM5113OR/BQA7ADqJ0mTnlhp5a/TvQdwSO0xVaCpRumf0nbuWpmKMSaW2DEbrOTq9E2nsOCCOoIm+3Ppo9LUsGH0E5B9jLXkpqE9AAuxzAH+YdxNdJal6mi44zsPgycaZzz0nA1CYZG7E/wDIhVFNnCDbP8pkz6UqptQHk6bzPljVKSoxapxy9/tLOjbzP4bLk/1/5gVwfPrFeRzgehv9DLOjvLA03LhhsQZpdpjRYGrPobcH2+Ju6G1FtA9S7HfEitNRQKbByg8j/sZiixtPZ
gjaXAo1FHlknfoc9DIRSSuG+tNmxLpFHiekFv8AHrHXqJwnTlM9VXtms7g9M+05HEtEaHJAPKek3jXLKOTEz3mJpgiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiI0EREBERAREQEREBERAREQEREBERAREQEREBERATMxMwEkrBG80RSxwJPy4G+wiq1+PebKuWxHTJEn0tXm2ZOygZY/EirNFfLWuPqcE7dh/zOqtKV0qGOM/UfaQ6KnzLWLbDm5Rjt3P7YEl1rF3WgDd92x2X/mStyKbq175rXC5wvx8ztcN4SuAWwSfiWOE8HLqrODucz2XDeGV1gHyxnHWcs89O+GG+XP0PCgCo8sn9p6PS8PCADeWaNKqYys6NdC9htPPbt6ZJFerSr0xv9pfq0+O232ktdQ2wJarr95mm0C047ZkyUDHTAlha5IE2hNqw0/aZ/DDG+Za5febBQe0SG1JtKnYTQ6Rc55BOhyfEz5YHQSs7c4aGsjPIJU1XAqdQp9IBI9p3QmDGB2hdvmnGPDV1AYovMo3E8lqOGpapPLh89TPuV2nFyEMoPwZ5Tinh6pC5WocrdQOxmplpdTJ8kvoNWefYe/LKWOfmRwWXoBPc6vRBXaqypQD0Jnk+JaF9I/OpwCevtOmOTjnhpwtTpDVcApwwwyt/inU09ycQoDcvLfX9Q+PeTV016/SMjELaB6SOx/2nPr8zQav+Jmu1QVYY6idZy46021NS1XecgI6ixV67df9x95MlQqsS+tsk4dSP/O8ta1P4SairlZSocnHcf8Ah/aa6Syt9RyrtReCUyPpbuB7e8Ca2qvU1i1fof46N7SlytRd6lyCu4+JfrcU3MjD0OeWxf8AC/Y/nN9VSt6Y6Ov0mU2qJUFvxvhhkfPyJixGR/M6A7OP9ZNVQzVjDeuocyexHcTbqMj+b/wwlUHqAfmGcHoZiyoaio0MA3sZ0FrUp5JGcdDK1yENms5I2mmXj9XQ2n1DIwwZXnoeLacaivzFUBx1nnyMbHrNOdYiZmIQiIgIiICIiAiIgIiICIiAiIgIiICIiNBERAREQEREBERAREQEREBERAREQEREBERAREQEREBMrmJJUu+8VW4yFx0PfEHc/EflM4kVggsQvf4nTppIrICnlH1Y747fqQJT064LW/4dlz3M7Oi04UV5OcNzsT8f8/0gi1w6lq+ZXODWAGPydzLfCdCdbrG1DDq23wJXrZhw3JHK17kn8z/tPW8A0Rr06ejc+85Z5aejx47dPh+i5QDsAJ3dPSQdhvI9Np+Ub7n+k6VNYG+Os89u3pk02qp95crTea1pnEtKkyratMSwi95qiydFhGVE3C+0ATcCVGvLvNuWbATOINtAs25ZsBNsYhEfLM8oE3xMgYgRckhur51xjMu8sjZNpLFleU4twhbaX5UyPb2nhOIcO5WsrZQVPv8A1n2B68jtPP8AG+CJqkZ0UB8ZBAjdjcsvb4pqdJZw28suyg4OO0k1ejTV6Tmez/1GfS/9PynpeLaE12NVdXjPQ4nna0/C2+TahNbH0nry/E74Zftx8njUeF25DaOxuU5KlG3AzIa6H0eubQv9JbnpY/4u3+0ucYoNLprUBJTZiO6+5+ZtxOlddw/z6QfNqAdDnqJ1cLNMHGo5LH25x5b4/Y/kZZp5m0r+YOVkyD+Ur6Vlt09WqIxVcMMP8LSetWNuGYknZgd8MP8AiVNta8qzPVj1boD2I6iZWvAAYZDjmU+xmAoBtUEhW9aEHv3EmrDWVo6uMA7r8yoi8h+QggCwbiRcq2AkDDS9etj2qR2HbuJTtyt/KAfVvkDvCKlulV8vj0nYieT4lpvw+pbH0k7T21gfzPpPK2x+85HHtJ5um81VAK7yxmx5OJmYlYIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAgRMwMgZOJYAAX5mlSdzJCJKsYHb36zPKchR9ROPzivZu
cjIAzJtOhe42H+X+sirNSAW01ggIu5HuBvOgoK1Hfd2/pt/Un9JT0lbPYTjd28sHHTuf9J2BSEvZQoIQenO/TqT+cNYpBUtvEadMu6177T6HwzT4qTAOcTxHh7Tm++3UMAeZuUZ22n0XQpy1qAJ5vLeXs8WOptfoqwBL1a57SCpekt1rtOTrpMg22lhFIMjrXIlhBjEjKRAJMsjUYko6y1G4G02A3mAJviVATOJnG0zj9IDGRM4me/xGIQA/SZAmQN5sBkQMcvvMcskxtBECA15kLV7Yx+stkSNlztC7eX43wOvW1YCgOpyrYnzniXCLFL1uOW1SfzE+zW1ggHG4M89x7g66is21KA4OQY6bll4r4+GFgNeob0n0uSM49jIeHD8M1ulc5FR2U91P+06XGNF+E1Zfl/htkMPvOLqcr5bqWL1Hlf/ALl/8x+k9GN28+eOrpt+HOls1GjBPkk+bT8g9R+RlnlL3BwSOZdxj+Yf7zNqtfVXbjDoOZfkd5mt+apwN2HqBz1InRxQsVrasoDlTkDsQZNpzyO1ZGQdwPaAq+SzAAjPMpPXB7TWh0LKD0J5Cfb2lRb5SoVsEb42HtIbkJ0jq+xDZU9xJg25Rdu5JMw4yCSMj27wK3lsayrb5HX5la6nzqGrbfbpLdnMp8vPQDE1xynPLmUfPNXQdPqrKj/KdpCZ3PEmmKaoXgYDbGcOVyrEREBERAREQEREBERAREQEREBERAREQEREbCIiAiIgIiICIiAiIgIiICIiAiIgIiICIiBnvMhctiYEmqXCljCpFH7bTDAmbKMD7TA3OJlQ+lB/3H+ks0jykVT39bSEKr3f9ij9h1lmrDPmzu36Dqf9BA6nCU5LDaelFZcj5P8ArLGClWosbrgIo9z1M2rrNelHICHtPOx/oJtQh1Oo0unxkc5Yn33mbdOuM29d4e0Ir0dYIwQATPX6Ws8qnE5XDKOcIoGAAQcdp6KmvlAGJ48ruvdOIlqXGDLVa9NpHWo223lmtcSJtLUO8squwkVYk4HSIjdRmbgTVR0kgHSVGyibjaage82lRmZ7TXvNhCMjfEz3gdJntiBsJkTAmR2gb9sTOPeajrNt5UaneaEA9RJCPaakQIGXffcSB6+ZSpHplthImGDI1HgPFPAhbU5Vcg5OfmfLrqHouFT7ENyHPt2n6F1umXUUEY3xPk/i/gnkXWamtdh9YA7TWF1VyntHmavRVW4OOQkEe3uJBzeW5Zdgpz+X/wCDMoQ1/LnexcD/ADCLVxUlnIOYEoy/P/IzPVHjySIWNdlQx/DJAI7zR2wUsbADjDY+JHTZha2GARtj3A/4ktgcq5G4K84+feajK3U4NinGc9AO0lPKvqyc5OR7SnpGHIpCkYGxEt8gPMwfmZhz4MLtWuJD12+55WzMZNZZSNydjLL0K9JrJAyM/nK1g5qg3Xlx1g253HdL+I0LnA5gM5nhiN59HevzNM9Z7gzwGtoNGqdD0O4ljGStEzMQyREQEREBERAREQEREBERAREQEREBERAREQEREBERAREQEREBERAREQEREBERARMxAAZwJbAA5VPQbmRUL6skZxJuoPycSWtQHTMwuxzNj9OB32gj+UDrtIqSpMVZJALn9h/ztL2lq57KkIB5nyfsDv8A0MgflrQvjIReUD3x/wAzpcM03PhxseTkXPucb/8AnvCybXdS+BgjLEAKPbvLnh7THU8Vss6VVAKCZQvAWy9g+1ew+ZJwrWWLWy0gjmO/zMZ9O2GpeX0/S6/R6Ovl51yfadSvi2kK8nmAE+8+eU/iAgblIPYttLKCz63G4PU5I/Sef0ej2r6LVrdPYAVsBl+t1cZUgj7z5tRrHT086k49p19Hxq5CFLAY7CS4U9pXuVbG2ZOjdJ5yjjlbAcx3zOtp9ZXbgqwk0rprvJRKyWDPWTq0Ik7TaaAzIhGwG0znGZrmMwNwek2E0E2EDYHebiaLJAOkIyOuJtjaYA3m4E0jWakSQiaEQ
IyJqRkGSETUyKruuP8AzrPO8f4cl1bcygo6kHaeleVNVULqipHYiTSy6r4BxPQtodVbU2Qa35l/8+0gqbzbLaFYZcBkB/xCex8Y8NNf8YL6lPK08Dex0up0+pX6VbB+xnp8d3HHzY6u1lXBqHNjOc/aWD6KVbchDj7gykzMlzrj0uCygyxp7A3Kh3Fox9p0edpo3Wt2rz9LZAPtOlQfLYcp3VjvnoDODqs06pbM7j0/cidWu7fn6c6jO3t/4JpFlS6uSRkqcYMzYvllyM+WT+xm1wD8pU9d5rzOKmUkMCMYxKbYK+hT/KZ4bxBR5WrLfOJ7us50vKADgzyXierBV/nMJXmZiZiEYiIkCIiAmZiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiZgIBibVrzMBAsVehBn7zcjfA7D94xuBj5mF3PyzTFbZOM/5R+8m01ZewOeg9X+0jbYEDrL2nQrQSdhWP1gaXpl66gc8zAEd9uv7mdyj0VhMEY3P2xOToqjbxLJGRWm+ffr/AFM6ueajl6s+cn2H/mIreKC0FtMK1PquYY+09t4e8NVpQhsOWIGZ5fRILeK0rjavHafUuGKiVAkgbYnDy5WcR6fFhO1ijgulNeBXn5O81t4ICpFJK/E7OncHAAG4wJdRQR/WcZXWx4PV8BsRSQuG74P7zmGi7TAlskDbOJ9Ot0i3Luuf9pwddwbqVB6nY+037Vn1leQW10uzn0n1Z6ztaHWNVaASQOoMifhTJ0TG++D0kS6aypAN/TJa1MdPWaLiBZirHf3ncq1AdRPBaa1xYmM567+89RoLeasHMhY7gbMkXeVqWziWUGREYbCM4meX7yKxuXuYEnPgzPmDE5N2v5LiOb6fqHxIn4gVvCK24wP2JhdO4tgJ6/aTqcAYM8yeLFXIUgogGTjcn2EmHGWXlVmCs2w22Pvj4EbPWvRq3vJl6Tyf/wCqaqnwwHXG5xOpTx3T2gHlZOboT0mpWLjXZIwJGwkC6+oqGL7fMm82twCHXBG2TKzNtSO806yUpnpvNGGJmtRCw3OJC43Ilg7ggyGx1HVgG7DMK8r4l4cmooc4+oET4txbSmpra3HQz7/xFPxFTKwKggjGdzPkfinRct3OF2fY7dDOnjurpnyc4vIG5zpUdt2Ucv6f8Tai3lUuD03Ej04JV636F8mR58sWKd9jPQ8libiy5TnBJ5sNmS8PvezTAF/pOwkFjC7QpvkhSJV0FwS9FPQ7Y+ZqMu/VexoIbHNkjaZWx1oJYjY7kdpRW0pbYpwOjCbKwzbVnqMiUrp6ZwtTgNn2nB8TqbNMrKMgMJ09IyPyhTjpkyPj1CvwoOvVesHx4CJlhhyJiEIiJBiJmYgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAme0wJmBiZmIgZk9S4XMiUS2npByNpKsAPSxPQbfczZB6ie6jH5zUnJXPQb4m67V57neZabVILLgDuBuZfclNMid3OTv2lfSJ3A+syxnm1VgAytY5AP6wsSaJClTO2zWNv9p1FAUlT1VVH2J3MgqUclac2csF++NyZvY/NQ1mSC/U/eK3HR4QOfUPcTnLgA/YT2mk14HKM9htPCcPuGm0u535tvtOnpuIErzcwUA955s5bXrwskfSNFrVAAY/OZ3NPqFsXoQDPm2l4nWGP8cvk4GNhmdbT8Z0o0y2p5+Ng2/Q5x7znrKOv419BrvrO2f2kjULevYzy2j1KseerUuvTZxO3p9e1Koblyh/8AsTcfmO0M3H9K+q4eEOeXGZRt0SspyO3YT1DCvUVcwIII2InMupOCBLYkv7ecOjVG26AfvL+iZlIAG2MSLVHlfE30zesEHrMtu/QckToVic7S7idOoYEscsmWG05+oVvUVPSdUJke8r3aZbDjB6ysyvNamsMBYBytYMGV/Ld1Zm6lubYdJ6FtKPpI+xkTaKtuq9dj8w3K8yK7bNQqhcLzc
3/nwJPYyV1kKOZmHTmIyPk9hO2NHWrNhRyntNfwVIU/wxn5hbXnjUd7LArW9RjHKv6yOwMrfxb+ZW7oh2z8nfH2npl0NJ2Kg+2RtJq+GUeZzClCcYyVyZdpXnNHxOqqvlTVM4xhlbbE73D+J0WAV2YORsD1Elt4QWOQ6lT1UoDOPruHWaWxrUXBByTjaWM9vW1103r6fq74OJhtKAfrsH/95nnuG8UsrwrPkr1I7iempvXUU86kHsYsZu4rHTIAd3P3cnMytNa7qoBk5HWaASLtU1FQKtgDOM7CfOPFulV67jsGzzL7T6dfsOYDp/SeF8X1KQwVSdhnH9Jcez5Xx/U1nT61WA2sz+sqas8qsehDY377ztcSpU2FeXDK3cTj65kGpYEHlcANnscdRPVK8mc5QUupVqz36Skrcjttgg5ElTNVoBbIOQG95XvOLyM422mtubqXWl2puGMsvKZubgmorfOxGDKFVofSEZ9SnI/1gsWqBzurS7HW0lgFg7DP+s6usRX4QwG5wZ5zTWfxE6kier5PN4WcL2javl1wxa49jNJPrU8vV2r/AN0glZIiJBiIiAiIgIiZMBMREBERAREQEREBERAREQEREBERAREQEREBERATMxMwEREBEQOsCSsZcD85Y3Jx2ziQ1DqfeWF2TPeZrcYPqYj32kreohR3mtQ9WT0EloOLTcRsgz+cixapbyldx/8ASMnHx/yZto0xWrMDzHLf+fnNdRWatJUn81x5nPfA3lvSVh7AM+lACYa0mFPLzqO1YB+Cdz+0r238qB8jGcqOoxjAnURCaD6AGuyduwP/ABOLdy36xaV+nmwfsJGrFywtVTXnJ9O3sJT/ABGoezbPwRO81GaVVF5jjeR6TReXeC4wO3xM+803/Hb9Q6LgfE+JtlFIB3yzYmOJaTV8FuGnuv8AMdxzFUM99wltlA5XI/wdcfInn/F+mH9p13EZRk5cnsY95Zwkwyl1XF0PijXcONfLfYpUjCtuMT6L4a8faTWMtevY03OcJYq4X4B/3nzfU01W8N0+mp04XUJe72XHq6lVAX8iCfznoOG6LVcbThHBPIFaaJ7WN1e7crkE57bY/eSzGzjt03njeen2DSakaZlKEeTZvhdxg9x7CXbhs2AcZnzrh3EbvDXEjwnU2m7Q55Vdhuue32n0A2htCGByCNjnqO04unfLgcQceZjv3mNESzdd5Bq3LXHH2k2gz5g2nOuk6eo0q+kYnSpTpKWjT0rOvRX0OJY55VuteF3kDjGfaW2PKJTvfAM0xFS0jJkBImLrRn5lOzUhCd/3kbkWzgjftMZOdvact+IVr1dS3sJtRxNTuTgfaTbfrXTCE4I2kiMUPtNNPq67CArA7dJbKBx039pWLxxW9doJx3kllK3puo3lIZUyzVYDjsZds2ODxPgrqfN0oAPNkjpmQ8H4o+k1i03qQtnpJPZhPVkKw989pwOMcLG2oqBBHVZrab3xXbYdfaajp1xK+h1P4jRr1LLsc7Scrnrv/SZEbnzNgfTjc+88d4kTlvpTGx2wfieyfYflPH+JWJ5GGMKxbP7STuNx804nRv5g9QLHOes83xpTVfXhcFlAz/Sey1dQdGVRn6iTieR8QVMEVyTlUA/MT1R5Mo4TWFXyRlG7exkeuTlRLAcjpmZZ+dWXGA3qU/M1ZvM0Tod8eofE25saZ8MwPQ9ZMD6XrB6jIlGmzBBk5flv+IRe0jct6Z+89tpwDw1ySc4wBPCaQY1RzvjpPc6LbQHPU9pYunzrjScvELDObOvx9OXiLnGJyCO4lYIiICIiBgTMRAxEzMQMzEzMGAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgJmYmYCIiAjrE2QZYAdcxSJ0XCftJm9hNejKv7zdhltph0kZxy1/fYS1TUCK6yOuXbHsJXALWBewx+sv6exa6r7cdSEDd8Dc4/P+kNNbD5uqUfygfoB/zOhol/g+YAP4px+U51Ya6zkXY2kLn2W
d7QUqKWbmGAVSvMza1jyzdYqUjH1cpUZ7f+D+so8J0Ju1wbqF3P3k2qcXalvLzyjCr8+89n4f4EKtLzsg52YHp1mM8tR1ww9qzw/hakAlf1nV/sWmwZCgH7TtUcOCAemXBodpw29E4eWHAzW2a+ZfttJDw/UMeW9Vvrz9LqD/UT040jLN105AwRC+zzn9g8OsUB+HUj7KRv+U6uh4fVok5dAF0zHqyrnP5mdMVbYIkgQKOgEqWvK8a8KarXXLeNZzO7epSvQT0zY0XDk0+eYqoXP5SVtl5j2nL1dhY7mLUnKk552J+Z0+GUFmBHec6tOZ56fhWnCqGPec2r07Gkq5VHtOko5VkFC/EnPSdI4W7rDmc/WMQsumUteP4cpHC1d/l5OZwNfr1qy19mFx6VB3Pt95e4/ZqK0QaYVmzHMA7Yz9vmeOfhHENbdXZqOIJ/FUNirqvx8GJNu3Em0+o8V0ae7lVF5NvU5wQO+0k0ni+t9YKw2m5eYDJYgdOvSV9d4b4XwrhN2tsqa6xFJHOx6z50z6pRZfX60QczqEyFGep9huP1m/445/zfX2rh/iTS6ixxZQ3IH5fNqIcftvjvPRabiNb1ltPeNRWpwQu5X7+32M+A8M1XEX0mp19OUXTlBY9Zw3qJAwPyno+F+LBRZVZYbQMbW1nDOf8Auzsfz3kuFhPJjk+zratyAgGZRmU46CeR4Z4hr1z1i+1abSuVYN6bvgT0dOrW8c5U1joAxwT8/aYvDWv069ZJx6v0kj0qyk4lGrUDHv8AaWluZh0hjKKlVP4fWnAwjDt0zLLsAQCd+wHWaW5J3bH2myjGQq/n7wiKzJXmswB/hnkPEnq8/b6axgfOZ7K1RyZO5/pPH8dAZLznALhf2k+t4/XhySleMEkjfeeY4/V6cY2JM9MpyjP7MO843GafM5WyD3+209EefOPBvzZOBjkOcfEABLSrbg/0M31aeVc3zInfmCNjoMTo46VlHKxz/KZNafpJ9pFaMWEjuJIo5qge8Glyhz51JH8xwZ72kK2kUj/DieC0OS42+kgz6FoE83RHGOnSJeVeA8RIPxxYfacHptPReIduIOnQ5nnW+ozbnWIiIQiIgIiICImIGZiZiBiIiAiIgIiICIiAiIgIiICIiAiIgIiZgYiIgJmYiBmIiAk1Cktke0h7yzXlK8+8VYlTexm6gbCb1n1E9hNEBVMHr1Mz9KZEw3tPUM8z9+g+8tXnNddCAAKOX7nuZHo81WKWHpqHO2emcSMgkeaxwev5npI1F7R1s9xYYPN/DT4Hc/pmdfUP5NCeWRvlUHux2z+kh0umSuupMYPKfuR1P+0yXN1jah8YVTyKOg7CTt06i/wXRC/W11gcwAyfvPqXDqV5GGNuk8R4QoyfNI35Qo2/Mz6Jo68AHGARjHzPN5LuvX45rB0kqDIDjeTCr4m1AwoxLKqJmM1X8n4mDVgdJbAGPiYYbGVFMptInwBLLnAlK99jG1itfb1GZznBJ36Se1+ZjIc5IEzWk+jo57QMfees0dYVAAOk4vCtP6ufE9Npqum0sjOdWa15V+ZsZnoYM05I2Er6qvnp26yywmhGRLB5Li+hTVVKroCV9xOB/ZSV2ZQFWHQie71enDE7bTk26XlOQJNO2OfDy/EdJfreHXaTmTncYHmdDPDanhXFODafXaZ9OU0+vp8ixlHNleYNsR03UT6tbplsXBErrVqNNkUkMM/zdpd2LqX4+N6Si/T6a3TVPYKruXmQfzkHI/Qz3CeCRX4aVdQh/EH1nHUZ7T2DNdZTWtmgodkOQ4XGD8fM0vN9uwrsUd8TczYvj3/p8h091vBNaVassAcqX2x8ifTfD3HqtfXW7qbtVgKoQYGP/O8qa7wwmuc5qAYDGVG810nhHWaIK1VzJ35kbBH395m3c5amPrxLw97V6up3xk8plytRjcfqZxeE06umpV1DeY3dydzO2nzMM5JAB7TbGN5qJuYYQXnCGeI8QOP7OvYnJyzAf
tPa6w4oc+wnz7xIwGgZScZwP3j63j1Xlq2zpwue+DKeuQMtmACAuBLFbAuFGCF6iR6pwKbAAs9EcMu3heM0fwVuA3Ddvaclh6cZ27T1OvoD6HUZzjlyJ5MEsmP8M6OIfrXPcTan+7b4MjsOAD7GS0gHmweo6GSrHQ4Zj8QPme74OebTuT0wdp4PSjkcEHee34QzJpSQNuUmMSvDeIHI4vYD1BnFsA5sjoZ1fEBD8Wvbm/mnK2K4yc9p1cWkREgREQEREBMYmYgIjvEBMTPeYMBERAREQEREBERAREQEREBERATMxMwETEQEzMTMBj4iMnMznPYQMqMn5lpselOw2kVKgvsNhJ8KGGc9N5LWpG3+syq5sUHZQcnM2AUdzgdBNqP4tmAoJY9+wmWvqdxyaUJvzWnL/Yf8yXSaZr9RWHGAvqYHv7Ca3c1l5UZ22/ITqaCry6jZ+f3kbkTXWstJHLu/pTHUiR3ZNq0V9EwWx+0yjG/Vu4wa6tge2cbmS6cGzV1KB6rLMk/AkvEdMZu6e88NaRtPpq++Rn9Z7bSr6V26TzvC0/hKAMAACek0+cKJ5Mu3s6mnSp+npLK9pWrlhekOdb++Zo02J2kTnAhENrbGcvU2bneXr2wCZydVbjeKsiu7b/Jk2moaxxtK1KtbbnsJ6TQaXlUEjcyaat0t6CjlUbYE7lC8qylSoWXl+kYmpHLK7bHrmJgHeMzTLJE0Im+ZqZUV7K8ynbSD2nRYbSJlhY41mkBORsZCdOynpOyax7TU1A9pG/Zy0QjblkwrBHT9pdFC56TcVL7QeymlAP8ALJ1pHLykCWBWB2m3JDNqCukKOm0l5AJuBEI0x8TJ2EGYJ2kVT4g3/pnHc7T5r4qvHMKsbc4z+U+ja9vSoz3zPlXH7hbqrMgHByPjJ6xj23/5cine5gBjc5/2kd7KarR7GK7uUM24IJIxKdthWu35nojz5XlSa4W6K0HGfUpzPFYKXMufierVgtDr2bJnl7UKaw56Tbj9aWD+HmKGIfHvJHUhCDNKV9amZ+N65X6Rmwbd57nheToCB7YnitOhssGOk9noDyaEgdZcZymTwHGRzcS1GOzTl5xOhxF+fV6hvdiP3nPM6uIesREgREQEREBERAREQMRMzEBERAREQEREBERAREQERiICIiAiO8GAmZiZgYmYiAmRtvMTPWBPQMb+8sphnyegxmQVg7AfaThSK8dMnJma3AsSTnriXOHr/eNjZRyg/Mq4Gw7neX6f4OmrRQSWBP3/APCZK1Ga1wwXBLu232l/WagaVU0tJza/q+3xKum/gXgsea3BLnso+Jtw1fO1NmssXPZAewka2vaZPJpFWRk/Ue5MscPUf2qD1CKP17yszctqEjpuRLvAwH1djEZyNv1mM67+KPpvDcGlCO/7TuafrOBwwhalHtO7pznE8r1V1KjvLAMqVNgSwDtLHKpCZDY03Jz3kFhxtKitqD1nG1ObLQo6CdHU2DcTmE4JY+8lai9w+tTcB27z1FSgBcCeO0l/l3ddp6/QOLEBJiJlwuJ1l5F568joJRPKh6y1pr1VSGOxm45Zb1uNjNSZuWDH0zQiQObEZkRPz3mQ+8okP7SJtzN87CYPaBpiYxvNx3mMbQNQJuFgDrNh0gYKzUiS4mCIERmud5uZGdpFDNCZknaRsYHM4lYEV2/w1sZ8f4pZ/EtbOctjP2n07xHf5Wg1b+yAT5NxTKNVU31gerf84w7bvGKuLStJ33JwJXuY5AzsZuy8yYGx6yMVtctZ6ZM9Eunmym3HvsKMynsMfvORrMGwfsZ6Gzh5fUOjE7kDP3M97T/0i0PEuGI41V9dxAyVII++IyzkMfHleXx8oWGD1xI6VIYg9jPTcf8ACOv8L60afVfxKHBNdoGMzgCoC04OcyS7auOu3R4fs2MT01RKcPPY9R9p5rRruoA6mehvBq0T439J2/Kbxc8nz3U2c72HuWJlabuckzSdXAiIkCIiAiIgIiICIiAgx
EDEREBERAREQEREBERAREQEGIgIjMQM9oiICBMYmYDvNwSJp3m6jeBcRsADp3kvKXxvtI6l9W/YbCbuT2O+MTNdIxnLkA7k4H2l928tlA9XIuw+ZR0wCubG35emfeb872NhSck/sJKs6WkRzSUz/FtbG3XE7Hp01HIAAKh0HdvaV9HUKlFjg8yDYY3z2hg1moqoXcucmGvre1XWtebPM+Dv7TqeHFy62e7YMoa4guW6YQ4E6PhME6YenPp2nDK8PVh2+gcOOBy+072nfI+08/onB5WwQT2IxO1Q2RnM4O1dWpsAS0rTnVv0ltX6byysVOTKOtuepMohck4wJZLZBla1veaZUb2yM43xKDH0474l285BnNsYofjMza6YvManxNbw/wASJpnQNptg7dwT3n0nhvEPSBzbf1nhdRwDT6jiS6ljzDqVx3nodExpAXcgSbnxbJXsF1AcfVmQvxSmlwj3VofZnAMg0lSX0HL8pIx8zwH/AFA8Js3Dm1ujDm+o82QTlhL2zjjPr6zptUtmCp6/MuMw5MifLv8ApvrdaeCpXq3dmRsKXO+J9HW3KCWX4znhqti3qgGRK2SZuDKxpMGzM9pGs3HSVGZkCJmAA3mwmIMqMkzUmC0jLSKEzQmC01JkGCZC7bGSE7dZU1VgroY/Eza1Hk/Fl2dBXSv1XXbj4G5nzTXudRxRwuSc7Ce48Rarn1lq59Olp5Afd23P9MTw+lIF7XYy+Tgmbw6XLlqyBTgjfGCJTpsFOuepht9+/vPQ0aFtVqa1C7sCW9szz/GNFbp+KUYAGWxn4PvOkrnlira9jVYuoGQGJH6GfYvBHinQ67T06XnK2BMervPkvEKWbhd1Rxz02Akfbr/pLPh+99NfpLFJDc2Npy8t1Nu/gw97619d/wCoHDKtf4V1jcimypDYhx0K7z856hBzLag2bfE/SnGdWv8A+j9bdaRgaZmOf8s/N1TB9FXnqCOsvhy3jty8s1dLvDRzXLt0na4y5q4XZaRj0ETkcNPLdmdDxLd/+xFe+wnqweXOvnpmJlhiYnRxIiJAiIgIiICIiAiIgIiYMBEzMQAiIgIiICIiAiIgIiICIiBmIiBiZiICIiBkCT1qM5PWQqcCWKFz8gwsWFHqPsJq5z6RnMkXOSB095G7crEjqTtMtJQpIVFPTLSzoNObbhWT6UG5HvIKmx5hxk7Io+TOjpz+C0zv/PjAHzI1Fi6wG7yayQF6n3Mk0CDzjaCcrsPtKukTKZfPMRkzo6ZfLUKOmdyBJl06eObu0GtIW0hiCOQ/ltOx4U/hVVfIwf1nC4iMXWg7YBH7Tu+FmD6OkH5BnLLp3wv5Pf6VQOYHodxOjVv95y9K2awD1E6NTbbzg7ujW20so20o1t7/AKywj+/6yRKslpXc9ZsX2xmQu8rCtduSe0pWJnoDmXm3m1enDkZkalQcP0DXWjP0zutwqutQ+Jc4boOVFbHWW9b6aiMdpdcJcudIOHadGXlI2ljV6FjUVCh0I3Uxw5emJ0ic9ZZOGMsrMnktLoq9JaVrqKKTnGJ3q62NYyCBL4QZzMmvPzEx0uXkuShy4Jmwk1leBmQkSs723B6TYHeRr2m4gSDbM2mgbabZlRnMwTtMEzQtIrJMjYwWmpPWAzNSZjPeak4EisM3pM5XEtQqKSx9CDmb7CdC2zHaeV8Q6liE0y//AGn1n/tHWRY8dxi5107Mx/i3t5jZ+Zw6SunKFlzgZ+86vFW/EalnYHy1xgf0nMeprblJOygCb+NSbr3nhmmrWaJ2KDzOcDf27zjeO+B54f8AjqSoaonmHzLfhu5q/KA2BeWfF2pWvQamvIzaoXf5wJiZcmWD5lVqubh2rYgFmrC7/cT1Pg3w6/EhXdZtUjZ9u4E8zxjTro+KnSVbh6a9gMeqfVOG6ijw34T/ABF7KvKufvL5eZpfDlcd157/AKp+Ik4XwZeB6Z826hQHx2Tv+s+T0MRpwDv6hJeMa/U8b4pqNbqTzWWHY
f4V7CR14REU+87ePD0x082eVyytrrcMrzaoPcybxgvlcMAU91zJOEqGuryO+Myt40cLpK092H+s74dOGbxLHIms2x6TNZpyIiICIiAiIgIiICIiAiIgJgzMwYCImYGIiICIiAiIgIiICIiBmYMRATIiICIiA7S1T6cg9hkyBBvk9t5NVuD8mFi4uyj/AA9TnvIiQbl2ziTP6FA95FWAC1h3IGwmWlquwUqCUBYnm+03NvmOARkLufkyFRuHPtsJY0Ona1stsM5wYVd0/MRzMNydhOgoKFVGCTgYPWVK2D3+nHIu0sc2dbWuTgbznbt3w4iHiR/9PZZyj17j8pf8KP8AwQP8Lf1nI1b+XQgdjy+Z0+CcToeFz5OpNRO0zl01hdZPpOlPpBE6VRGJx9I5A+206dZ3BHSeZ6l+tpOGGOsqK2Bmbc/YRETtbjODIWt23kdlgUbn7zhcR4ytOUrb1e/tKuOFzuo7j62ijexxv2mE8QaWoqVQnfByZ4O/iD3H6jFWqPN6mmXrw/xsZ/Z9s4ZxvTaqlTUVB6Yz0m+q57STkbz49wzjD6fVhQ5CEjf2nubeNMygVWnHc95PbSX/AAvy3i9pw+seWSGBPeWjses8LpON36ewN5mR+09XoOK0a4bNysexM3hlLw83m/xs8OfjoqZuDIjt3mwadHlZcZlRxvLROZC4zJSIPvM5xDbGY7SNNw0yGkeZkGFb5mDMTEgwTtNTmZMxAwZGxwMyQyJtxIKmptVKmZjsN54PWamzUau24g+V9Kfaeh8Q63yq/JXJLD6R1nmwOa9KFUHk+r7/APEaanTn6yg+WinGbH5sKOk1fh4GiRx/MST8y/qMNeGCdBjm950npralKlGGbA+wlt4bwinwqrlGnI7ONpS8Z3Jfr9NpFYBmZScew3nouG6YLaGI9KknE8oDVxLxjfdYrFNPkKe225/pMY97bz54cDX0+b4vCnc1qnN9+s6HjPip1VWn4ahwtQ57CD1JHSV67Us4rruIuchrGI+QJxtQ72Wmx8kk5OZ3mPMrz26ljiLVy6sKe5muroYBGXbBwZZ1QFVwc9DOkukW7TBj0IBnaOFm2ODkm1M+0peNvoq+H/0nR09XkXZA25sZ+JxPFuo82ypPnM3j045vMTXvNu2Jg9BNObEREBERAREQEREBERAREQEwZmIGIiICIiAiIgIiICImYGImZiAiZiAiIgIHWMTIUwNsemWqFGFX5kPJggS3p1CKGPbMVY3uPU9T0E1qqZgTkbzWx8gbbmbo5Nf59plpYYYZa1OcbS/X/C0xHdpR0tYa3r0nRrq8+7kAwq9ZK3EunqK1r3JlgZOrUqPpXJ+8kChKzuAAOsi0jGxrLCBhVON5l1cvihPO1R/mRiv3G8tcEvK6tSeoIJ+xlDirqHptDA+rO3tGiu8vWVNnIZeX9DFnCTL8n1fRueUEzrVHp7Tz3C7fMpXJ6Ded7Ttty+08lmq9ku4vA7TDNyzVTtNbD3kVwOP8Us06lEO3SeM1HEQmXvblUHBJnq+M1k3qx6dpyDw7Tag8tiA56kxLPr24YX1/FQ0bnXIXoIZB1OZ09Nw664kI9eR8zp8N8OaNlwiKCfbadMeFlU81Dsp7kGX3w/TX8fnn/qf/ABwG4Vq6H8w1ZAwSQczqtqWAVijocbZGMzonhmvqr8sWBww/m9p0ls5tPyXaNthjAww/ePXDL6fyefD+2O/+OHpeKKw5XPTadTS8QNLh632B9+k5nE9DpSyOlFlOTglUxOe9Wo0hDLmyrOzAzF8dnMdcfPhn+Nmv+vrvBuNVa6gIx/iD951eYDvPjHDONHS6lGV++8+m8M4vXrtKrht+4msc/lfP/wAr/F/jy9seq7fOO8jY5kCXBu825pvbx6ZaazOZiRomRAEziA7TBm2Jqw79IRiY7GMkdpqzgbYP6SKGV9VatFDMTgyYsMZOf0nlPEXEHutGkoJJPUwRytbqLLtQ+qPQfQCJnS6NmrLkHmcdf
Ydz95toNMup1QLZeiojO/1H2nsV0lICl1ClRzOANs46S3hZeXmTwJ/KDY5tuds9h2laquxLizqcDYZnrnJXSFgPXccgHsO05vEHrShECjmmMt10wycx9T+D4dqtVg+hTj7zwVGuOl4VqdQf7zUKQnvv3nofFGsVqKdACV81huPYdf3nk7/Lu4hivJo04wD2OJvx47TPLXKFauTS11b5G7CVLkOTn36S5zNzM56mQ2gM4PvPQ87hcZ9Dqq9952OGOLeF7ZLDac3jVZGsX25Za4OGVWq7FZv457/KumKi1I26LPF+JyPxVYH+Ge6Oa9EzN7TwvHV8xFsx6lO5+DNuN5cPY7zG0xAO8rDETJxnaYgIiICIiAiIgIiICIiAiIgYjMRAREQEREBMxEBER3gJgR3mYCIjvAAkdJtzn4/Saxj5gZDMO825s9c/lNJsoyRAssgBBDdRncSwikVY23+ZCwLLWv5GSk4wB1ziStRs1YX1OcADpN0QKqdgDn7zUo19uB+8upR6gTg47CRYk09JVcqPU3ednSUCuvOwJ6k95S0qMxBIwBLjNtscj2krpir6+7y6So2J2GJoreRwqyxtu0h1ObNUgb78szxMlOHoucc5Jx9pNLv65fEuXyKFC4yDK7u1emotH/12bY9us31zkihf8NeZFt+AYMSRzbTTFfSPD2q59PW2djtnM9bQ/wAz5n4M1gehtOxyUPf2n0PTucLvmeTyY6r2+LLeMddG26zJGQZDU2QJOu85urna/RfiaiB9Q3Wedatq7CjKUcbT2hT95T1fDqdSp5xhh0YdRI7+LzenF6cvhmpauwKT1M9ZpLQ6bMPeeUPCdTSwamwWAdAdjJqddrNI556Gx8DMzp7Z5McpxXs8o1nqxsP6SbyEasAf4lE8rRx5XYh1KMQes7FGuVuUq4xn3+JOFuOWuHaGgrsOCBj+s4fF/BGj4pUx5GpsyQHpPKfz7GdKrVsVGGwcbby5VeXXe5UPbedJY8+Uynb5Rb4D4xwi8tprxqdOzcvK2QRLei4lxTgbqb9NalTErzEHlJn1LNbgZYHlP7+8f2VptYDzoGr5uYhh1MWbT+bHDH1ynCpwnXtrNMlnuMzsKx2MrpoatMOSpFVfYCTIMbSzb5+VlvCaZAmFm0rJMiAJmRCYO5g9JjG2YGpxNTNmIxkyvddhGIICgfUYFTiuvTQ6ZmJAONh7n2ni6Ws1N7O399b1x2Et8Y1v4q/CsfLXcGWOA6DztR5zjONgvv8AH2movTo8Hoo0oV3KqT/dqeuf8U7GrRlv0+nOy2DmJJ6iVih4ZxOq4gWJYfV6QeX2xOpZbRp1d7qfNe45AYbgHoBL67nLOWVlmlLVEBXIGT0HxPOaywcrO/Xos7bOtWnFbHGOvwPafOfEvHRqLbNNpH2zgssx623TrMvXlwtfrX1/GHdMkqOSv4HvI2UUVmld+7H3MmppXSVeZ1c/vKzepiTk77z0Sajz5ZbqGsGyyzOwAEw+A+495JV/fW474AkN5ZXIz2MNTpzeM8z65AR/IDJuFEcxPcbfvINdltVWxJ3TBlrh9fKrEY3x/WdJ05Xtd4nfy6UKNges8dqrEsvKP9DDlnp+Ov5ejweonhXt8z1dzvNuNqk6lWZT2OJrJLh6+b/FvI5pgiIkCIiAiIgIiICIiAiIgIiIGIxEQEREBERAzMREBmZmMRAzEdpjEDMRGMQEzjeYiBnHyJLWFQFicyICb/eBbpK8rE7kbyaog4wMd5WqPpPztLlVROFUYkrUT1mpRvkt74nQ01IIDEZzIdNoGbBONvedMKtS4OxEjUiQhUrwux6dJow5ad+p7zNY5+hzIdTYyen7yNqCZbVPYSdtgDIeKWG3VUUdMLv+ct6RcWrzfJlO5hdxU2EZwc5gUNfg6uwDoiBZCcjRHfCk9PzmbnDtqH/xMMfrMWjGjQe5xKwueGtX+F4xWC2Fs9J/0n1rR2ZQT4cjFSrqcMpyDPrPh/iK63QU3KfqG
G+D3nHzY/Xf/Gy+V66lukuIMic/TtkCdCueevWsImcSXyQRnEzSMgS/VVlQcSaFFdGD1HebHhYfoDmdeqjODjeXUoHtkyyJ7PI38FBGTWG+cSOvhSKdudO+VM9qdKpG6yI6ADcAY9pLi1j5spxK87Rw9hjF7Ef906dOg/7zmXxpAv8ALJUpx1k9W7/kZ/tijTIu5PN8GXAcDAGB7CRquOkkmo4ZZXK8tWGRNQBJJoZWWy9ptNAZvmQZg9JjrMF+QZY4X3hG3xNbHVRucn2EjNjP/dj8zHKKxk7k/vA0PMy5sOB2AnmuOcUNznSadhyKcWEd/iS8X4yzl9NpXA2/iXdkHxPP6ceb5hrPJQgw1jdfk/eWRZwg1GqroQvcfhQfeel8PaqhNPlXBdjvvPi/irxE2q4kdPon5aKm+o951OEa7ifkJrNFargekp8+8eSZSTTXiuOW5e32TVaizV6lKNPYgdDznIzk9hMcS1z8LU3cStTnYfWDttPOeGOLXUfiNfqGcLpai97pXzY2ydvgDpPmfivxnqfFPEKndSq158uvP1En6mH+keOe8tZ8lmGUmnqOPeNW1hejh7MAx5Tbjr9px+H6MhQzdOsrcN4a1dYu1B9R3+0vX3FKwi/zHrO+OMjnlnb2i1NvOzYGy9JHjkoU43Y5kNrHnxneb2FgiKR22mqzGiggsR1YyDVo/MADnYCTBi2W6AMcSHX6lEvqTof+JltR1QDmv3xidFKxp60A68oM5l459XUqknuZ16l5m5W3IE64xxy7cXxRdnTMBsQB/SeIZugE9Z4mtB5lztnH5TyBGCR7TbhWzPkAHtNDMzEIREQEREBERAREQEREBERAREQMRMzEBERAREQEzMRmAiIgZ7RMTMDIGe4+xm5rYjIBwOsj+ZsrspyrFT7g4lGAR3mRgb4yDJfPsb6iG+6gzAVmbYD7SDUKc8w6e8ycs0sV0OpxjH3lvTaXfIrz9o2umdHpecDmB950l03QDO3x0maaAm5OPiT+bjZcGZbmk1RWpOTOfmbMC4AB+8jWo5znMmICL1hptzitOuPtOfqbAWC9zvJ2bmbfp8Snblr9t8CBLUBVVZaSchTj85zqWC6a7UEdFwDL2oby9Cd93O05+oUVcOI/x7wlcsgtST3LSXWN6aUHQLmYKkeUvYkkSLVE+cR7AQz8Qj6TPT+DuK/hdYdHY3otOUz2aeZHQxU7VuHQkMpyCOxjKbmjHK45Sx950VwYCdmo5E8L4Y4r+O0lbM38QAB/vPZ6Z8r8zxZTV0+ljdzbrUHBHt0nVoxgCcWlsTqaezPeZK61WJbrxiUKm6S7W2d8zTFWVG2JN5YI6SFGHSTqwljFRmnG008qWSQZqZdJtByYmCJNianbrIqOaGSHrNDIrWMzVmC9TIss5x0EzVTmzbCDJ/pMKhbdjzffoISvbfH27SLU6yvT1lmcAKMk5lElhrqrJJCgdTnGJ5bi3GvxHPTpnNdKf3l2cY+JV4rxhtUpNjtVpc+lB9dn5dhKWl4fbxIizUr5GlQ+mkdPufcyyHER6PS28Vu5F56tGhyd97PvOf474h/ZnB/w1BNav6fT3nuaKa66MBeWte3cz5P/ANRbW4hxinS84SlMu5z+gm8Zus53h88NVmsvOG5a87t2npOA8G4vxfiFeg4X5q1r9XKcAD3Mu8A8J67xA9lfC1rzWuVNpwCfj5nr/CfBuJeBtdqeIa/VVkhD5mnPf8/eb8mck054YXb3Wv4Hp/DPgHU8Mpbn1OpoZbbCMk5GGM+KcJ4KmktNmoTLqcDPbHefVNZ410/GeGuw0bI7AKMvnHwPieM1aGt25sBm3x2EmHHS6+5doLL+YYxhRKLMzWl87CbtcbGK49P0j5kNrbmtSAR1nVi8q6uXfmB6t+0t38wRT3lekLyKQN9pJqbOSthjJ7SN4zhrz+iv/uJM5etbm11YJ3EvtkFAewE52oA/Gc5O3SIZVJp1zczZzytidOgZV7ObA3M52
mXPPjqxJE6Fx8jQM52wpE6RwyrxnGrRY7jOTn/mcEnMvcRfmsJB6tKE051mYjMdYQiIgIiICIiAiIgIiICIiAiIgY3iZmMwEREBERAREQERAgJmY7zMDIOJIp5v5QZFMgQJ1KgfSQZYqP2wZVrDEjDGXakcgYAx32hUyMxbBGQPeXarHwFC/pK1VLZHtLaI6kYIIPvIsS4JbDHEmrRQDjB+ZqFU9HBPeYB8s+4jSrIGFznB+JG9hAIzmai/J3G01YgkDtmRqVhmCKTn5kCjFfOTu3QyzqFHkdZFyc1dSAEDGce8Ki4kQG0yYyBjOOsq688yhe4GCPvLV6g65Sd1UiUeIt67GHuISqqjK1k9VEqW+q0n3MvkAaYtnqABOfnI+QcwzWq9xMD6puB6pqvWVHsPDWobTqjg9xmfStDqxYisp2InyzgL/wAPE9rw69qgOXcdxPP5cdvZ4bqae4ptzOjp7QGnm9Jqg6AgzpU6gg5Bnneh6ei752+ZeS0Ynm6NXgjJnSq1Owl2zcXZWyTrZmcivUgnrLKXD3jbOnTFkz5koi+bC3PzLtnS0XzME5kAf7QbAB1jZpIWAMjdiRgD85qbRIn1AUHJElqpAg6k5MwzpWvMxwJyNZx6jTehDz2Hoq7mec1vFbtQ/NdaQP8AAnQfnI16vR67jqVk11epv8K9fzPQTy2s4pZqbsM3m2BsrWv0J8n3MqE6nXHk06hKs7sO86Wj4WlG3Vz1xuZqTRxOmuj0fM3n6kl7WOf/AMewnoaK1WsPfgD+VF3kWm0BxlgUA6nvOlVStfqbr2BhOHK1mqaqmx2HIAMjPafC+Ma9uJ8dvtBJUOQM74E+peOOInS8M1AUkM45R+ZnyVFTTUliCXbsO87eOfXLy349b4E8SW8H4kzqeVdh6ujDM9z4x1Gm4poxrKbBzWDDpnp7T5JodK1zLfd6VU+lBPf8D4fbqlD3B/LG4Elw1drjlvGbdDw/wNNRQgtADF1CDPvvvPWW+D9BqNbc2rVUoqVSeX2wJrwbhr+Yb1VlqTcH3x3mvirinE9DQfJRDpyuLGxkjPTMM5X4+X8Rrp02sv8AJGKK2IrHx2nKB5lexh1OBJ+IXGxiuep/WVnf0BQNp0ZS1AcoI6ZxNNUwV1TB3YSKpvVWmcjrNm2vyegyYbnSG64+aSBsRKeoBbU8nzmTWPm7lHQtiZRA2rct26SxjKrGlrxv7SPjmqWnhxTO5BlyhSoIxsJ5rxJcPxXl52xOvxwtea1FnOR+cgMkt2sPtNIYYiIgIiICIiAiIgIiICIiAiIgYmYiAmJmIGIiICIiAiIgIiIGYmJmAktagneRggdZsGxAvLWqAFRJ6+bb/Sc6u5gQM7S/VYCAQd4VMr2Y5Q2JujWKeUEY7yI2qdthNRc3aFWjYwPUzYOMdTmVizFSxO8K+W+YF1H32/eTVktZtiVKyM7y3UfXtMtRvqhisAtv7SIWBSp3PK2Jvb6rAGPeQ2YWwqOnOOkNM2ZLFwCQSDiUdfh3sxkYb/SdHZhy59XaUdVzVWu3XOD+olSqtwA01S5z6CT95zOhzOipJVgcfTKdtfKcg5EjNY7g9pqww+03q9jNWBBEFdrglvKwE9voW6exnzzQvyWqZ7fhl3Mg3nLN6PHXpqCQQQZ0qL2AzOZpslQZ0a12E81eqL9OoHeXqtV/3bCcxK5KKz2OPmZadlNYB3EtV6tevN+880y2qcq/6zZbtQO+fvCaesr1S/4hJ/xS46ieP/EavBwQPmYazWOP78n7bQnq9c/Ea16uJTt47p6hjnyfYTzJoezHm2M/uMyRdNXWM8irmWHrF6/xLY/porz8zn3a3V3+q52I/wAOeUCWFoDYCpn8pco4T5jBrAB95dHEcTkvu9NaEA+wxn/eXtLwJnIZlGP+7JH6d56GnQ01bKn595bWoY2GJWdufp+FogGSX+OgH5CX69OlagBQo+BLATAAxuZKlIHqbc9h7QztGtfRm7dB7SO1woJPT
3k7v2GPk+05/Eb10+ld++PqMD5Z491fn66uhMHfmM8umgBdfMJLdeXufv7Tu8Ts8/i1lhAZs998SjzLSWts2yd/mejHiOOXe26VGusWPgAdB/tPU+GvFFegtqr4jYtWk5t7CM8ufieQ803+tjisdMyhqbzqbsqcov0j5ls2ntp+iT4k8O6TyatPxHSWKU5srZk/YzxPjTxRo7qzpdHYtiMMsyHYk9j9p880VOOoJPV2jXWqzlQdhJ6/UiN2DMG7TLkEAL1Mi2JUEjGJlWHOCo7yq3VMXs38qbftIi5NjDr2m/mj8Oc7cx3kCWfxPT3hpHavlMjSWgZtJHfEg1D81/L1AxLGmBIz0zN4xyyrpVHGmdm69Z4LjOoFuutYHI5sT3WssWjhTudjjafM9Q/Pe7Z6sTOjlUTbnJmJlprIyREQEREBETMBEwZntAxERAREQEREBMTMQMRMxiBiIiAiIgIiICIiAiO8yYCMxEB3zJktKDbqZCIgWfNJHzJq2OPUcSkDuJMbPTCrxfKjpiYDAWAe8qrZ6PeSAgFSesDok4GBLKWL22OJQZvSDJUfKj4mW4ujDupbfEr2DlZydgDkTelvST1OJV1VhGn26hsmFWKLOdQdsgzGsTnDOu/Y/wBRKmmvCtnOAe8vmxSckg12DDfBlRy61DWDfBIxgyNq+dW9x0ElvRqrifnY4mCwUh8ZB6j2kRSQYz74m/L5lX/cJveqpbkHKsMgiEBG+dj7SNN9PsVnq+D37gEzzCJkZGze063DbeSwfec8uXTF9G0Q5lE61CGcPhdoasHM9DpjkiefJ6sVuuvI6SXy+2P0ktC7SwEzMN7UWqPtNTUc9J0PKJmVp+INqSack5Ikv4Y4ydpfSo+0sLSNtsn3MM2ubXoWPcqD7DeWa+G1g55d/nrL61kdBJQh9ppnatXp1TZVwJaSoAfM2WsnEmCHvKm0SqAZIqnsJsqKPvNywxsIBVKgEkZmGLMcFsDviM+8q6nWV0IcHeEb23VUKSdvieV49r3tqYduyy9bqTqCeUEsZxONVeVonsbdsTUXTwtl1enNtjep99hOXULNZbzOds5x/hEnObUcn+ZsfeaWsKKxWh3J3x1M7xxrTVv5hFFfpRdsyGkL5oSsEgbZ9zMWOVU4+ttvtOhoKRpKRaw/isfSD/WaYWLXXR6TkJy/1Ofn2nIsdmIJ6mWNc/m2rWuSBufn5lR28zVKgGMDcyVqRZ5iqFj1OwmQxSgtjcjaR2ZVMt9gJmx88lQ69zIfWtoIpz8YH3kdBwWJ6CZ1VwDIg7bmaWDlrXfGdzLFta5Y2NtuTvOhp1JZcDaUKWV7j1OJ29GhyCR03xOkcK5vie78Nw4VKfqM8DZ9R+89L4r13nawVg7IOk8wTzHM0xTqMTWZESIxERAREQEREBERAREQEREBERAREQMRMzEBERAREQEREBMzEQHeZmIgZxMTMQAiYmYAbTPMZiMQJEflkteWz3lYSdLORNusKuK2VxmSVH1Y95Tpsy/KT17mXAFNgAI+8ljUWq2KOOmDMW1+aGHvNraiKEs9jibI3MMdxI05FbeUxV+2xEupeEwp3Q9JDraitxcdD1kVRJ9OdvmEX2dLauQkdNj7Tn8xqsKMMgnqZI1b1nOesjb1rhxuOkI2CBvSeh6TC0sp9J29jCcwxtLVSiz0nrFWNERlxkd9jL9B5bAc95EmmLMFBx8zoJoXQDbPzOVdcXreDsTUpztPV6QkAGeO4GSECt1nsNJtiefJ6cXaoOQJcQSjpyMCdGvpM6VuFki17zKCSqJdIKmO0lVIAkiwMhZsBvMTIMqNxgTPNvNMzRmC7kiBNmR2Won1SrZqG3CyNa7bT3+8itb9aSxRTt8St5DXnBLcs6KaNQckbyYqE+kZPxKbUq9HXSgLdPaeb8S2B6WRRsB+s9TqGVAWc83wO08Vx7UZrtsPTtCyPn91o0tbkgM4+lZzxY7WAFss25PtNrQ1lltjtnmO020mna7UK
g2HeemdPNeVzQ6UWubG3QdzMavUg3MFP0jH2k+u1dWg05prOW+Jwld3XJ6scxtJFoMxBsbvtNKyTYWEw59KpN1bk6Dt1kabMxa0g9FmmT5zOTgDYQ1nIpJ2ZjInYrWzN1PSaZ2wXDN5jYODgZ+JpqHIqyx36zalS7DP095BqD+I1gVT6F6zUZtX+HUkKuRkncztX2LotObWPaVOHVYXmPtOZ4m13l6Q1hvU2wm4515PX6ltTqXtPcyoJsxzNYYJgzJmICIiAiIgIiICIiAiIgIiICIiAmDMxATGYiAiIgIiICZxMRAziYiICZmJmAiYmYCIiAiIgIiIGyHDZMsV6jlOTvvKsQu3o6L0t0p5m222zIfNrVwebptOKjkDGTj2gORneNL7OxqNRU1eF3OM46yqrJsVU9NxKQY+8C1l6GNHs7aCuxMHOP6SI1ioK3UHYfEoV6xgMNNm1ZPMnVTvM6a2ssyLuu4+ZNWS7gVgEnpOcLubf4nV8M1/ieN1VP8ASTJeJtZzdL1Wk1XlczVHbfp1E7HDtPa4BIJX2M9ppuGVipVIBx7zA4etDnkHp64nnvk29E8enO0Gj8twQNp6TTIfTK1NAUg4nRoXAnN2i5pxOjWNpToXEuoMCQTLJl3xIl2kiyokEkBkYm46yI3HSbTUTcDMDVlz3kf4cMck5lkLMhBKK66dfaTrVtsMSQACCQD1MDTy8dTIL2RAd8SR7QNszn6hgCSWkWRS1ljMpxss8T4ksHkmrOx6z0/FNctVJAnzzjN12ttK1Z+8uPNby4jgOLL7RXUMDPWdAW18Np2Ia1hK5ddIpRDlh9Te04l+razVKMk7z0R5alttNtrMzZZjvJh9PNjaVkT+I2evtJWcsAiS0jfn52JB6dJuzhGAG+BvIT/CXfaamzAJ/mPvEZ2WMbLgO5MlYix+uQvWVk3yxzzN0m72rUorUb9SZuMVLdcKaD2JE14fSSvMRkuZAEbU2ID9PYTu6GhUIz9K7yyM2rlhFFKKMdN54Ljmr/Fa9yp9K+kT0/F9b5VFhOx6LPD2N6jNMVHERCExMzBgIiICIiAiIgIiICIiAiIgIiICIiBiIiAiIgIiICIiAiIgZiYmYGJmIgIiICIiAiIgIiICZiICIiAmYiBlTvPR+DsDxBR8zzc7/hG3l8Rab5aZz/rW8L+UfbqFxWJh1+JvUvoE3InhfQRIvxLdS+wkKrgyzT16wLlK9JdQCVadu8tqekpUgEkUTRTJR0hlkCbAYMTYQNl3ki7TQSRRAkA2mcbTIHzMmBoZGxkjSCxsAyCC11AJE4nEtWa1JZlVZe1l3Kv1YnkONa0lGHqwJHTGOLxnjVRdgjA47zyeo4m9nMlR69SJFxfUO1rA7Ant1lBr10+n2Hq7Cd8cdOOeaTVW8lBUHc9SZz6MG8MPVjvNbLGtOCdpvW/IoWsYz1M6acd7Wy/JY+Orb5M2ryi8zdT7TVV5mRj0x0keptwwVc8x6D2EujbJfz7SWPoXqZqQWfm6A9PtNAQMIBt1JzNDY9z4TYfEsjG0/mgEhew6+0iDc7ZI3Owh1FSlOrH6viW9DpSxDldidhKlq9w/SnlLHqf2E6L4rqOTgkfoJhAtKcxOAJyOJcQwCAd26/Am4xtyeNakWWBFOy7mcAnJzLmrsLszHqxzKed5UpETEiNsTUzIMQMRM5mICIiAiIgIiICIiAiIgIiICImDAzExGICIiAiIgIiICIiAiIEDPaYmcxAREQEREBERAREQEzMTMBEt6LQtq22ziek0fhJrE5ipJMW6XTyMT0mv8MtpmYgECeeuqamwo3URtGk63hl+TxBpDn+ecmdDgTBeNaQn/wDqCTLqrj3H6DoHMgx7SQpia6AcyL32l405ngfSijyyeob/ABNmpIPxNlTEKs1mWUaVa5Op3hFlWkoMrKZKplRODJFkAMmSETJ1kyrmR1iTgQjIEwZttMHG4gRvsJT1DYlpsATnapjg4mbWo42vfczzPEAbc
r0HfE72ufGd5w9TaK8lhJHZ4fi+grW0uSczzeoQmwbZHuZ7XiV2m1BZSzo+fbrOBqdGnNhME56kz1Yb1y8nl1vhxhUPMye0lFTHbBWXTStFnMf1kVlgLPg7Z3zOunC1i2zy0RKx6pSOzZJye5lkuoXbcEyvy5YnGcS6Z3tq5yTynAkqL5dWf5z0m9dTZ9Q37CXKNFz5eyRVXTaY2vlskd/meh0un5Kw7KVAGwkNFaVgErgCR8S4oNPU3qHMBgCbjNa8Q1qr/CB9Q9pwtY+Ew3U7mZqfKnUW9TuMzk6zVNdY2DsZWUFz87mRRmJEJiZiAmIzEBERAREQEREBERAREQEREBERAREQMREQEREBERAREQEREDMxEQERMiAiIgIiICIiAiJvVU1tiooJJMDCoW2AJlmjQX3WBQhwZ6Th3h8siuR+onpdFwauvlJX9pi5yNzC1X8N+HwiKWSe50+jStMco+JDoKURAAAAJ0Q6gdZyuW2/XTh8Y0KPS/pHSfN9fwZn1T4WfV9YwsQjsZwrOG+ZaSFlmek9bXzTUcFetdlOfib8D4VqTxOiwoQqPPpR4MjdRn7iWNNwujT4IQZz1xJl5eNN4+Lndep4aPQgH+ETr+Xhek5vDCvpGO07YXKzzvUpMm3SaYHfaWbVxKxOIVsvxJVkHNJA2IFhTgTdTKws+ZIrbyotIZZrGwlWveW6x0hFhAJMPmRoJJ0hA7TQmbE9pGRAisbYzm6ptjidB+852p6GYreLh6qvmJnI1NCOMFj+U7WpPxtOXeQoOO5xEdK8pxHShWJbm2PYb4nnNVqURm2KjmwCd8z1/Fabr6g1AbJXt954ziHCdWbP4gOTuTPR48nn8uP6cl9RbdYzBwFB6yGx2Z8u7MPedargtzgKqkKPcdTNm4S9YwxyJ2mUee4VzqeZgAtZbfvL66Vw3rBwTnlWW9PQlFY5iF74x1l3SrbdYBVUTnpgRsmKrXo1qUPb6F9h1MkttXyf4aAVjcmey4X4H1WuYXa1iidlxPTL4O0H4VtL5IIZcMcbzF8snDU8dr4tdxEKWVN2HSclybXZ7myo7e5ne8YeFdT4X4iWId9LaT5Tnt8H5njrr3JOTkztjdzbjlLOK31erLnkU+kSodhMTBlYIiICIiBiIiAiIgIiICIiAiIgIiICIiAiIgJiZmICIiAiIgIiICIiAiIgZiYmYCIiAiIgIibJW1jYVST8QNYnT03BNVqD9GBO7o/B72Ac6mZueMbmGVed4XoDr9YlPRSd59W4d4Ipr0i2FFJxkHEp8K8KfgLUuCYx1nuqtXjSrWcDAwJxz8m+nfx+LXbzi8PXTsEC7CTGrcBROjYvO2ZtVphkHE4e1d5jEFKMqyYI57y15eBNgu0ltPWKZ0+TvJFoA7SfAmZOU1Fc1CVdSwQAfMvscCcvUNz6hF+Y0r0nB05gD8CeiSv09Jw+CrsJ6atQUEsK5uor22nOsG87mpr9JnGuXBMLEGTmOaYzvI2bH2kjSUWYPWWKnyZzOb1S7p98GVHUp+ZerGwlKjcCXa+krNWV2m+faRpvJAPaEYMjYyUgY6yI7bQIn7zn6lMjedCwhBljtObeXvJCjA6iZaji6okkhRnrjE534cu2WPQ5noG0mMk++ekjOmGehjTe3G8nCYA2lPUcPps3evJHaeiOl7lRNDpM7ld4NvL28NVkwlYXM4mq4LfZZ5aEkHr7z376bsB2k2k4civzsN5rHKxMpK8RwzwRdqHDWnbsJ9C4T4a0XDal5a1ezuxl2rlrA5dpaR8y3K3tz1J0mrrXuPykhrUjpIlb5koYEdd5Ecfj3AtJxvh9mk1VYdHH6H3E/M3izw1q/DPF30uoUtW3qqtxsy/7z9YMNsmeX8YeFNL4p4S+ltHJYPVXZjdG9508edxv+mPJhMp/t+WJidnj3hninh7XWabXaZxy7i1RlGHuD+c52m0mo1lwp01Ft1hGQlaFj+gnr3Hk1d6V8RN3Rq3KMpVlJBB7G
aQhHeIgY7xEQEREBERAREQEREBERAREQEREBMGZiBiIiAiIgIgxAREQEzMQICJmICIiAmQpYgAZJm1VbWuEQEk+09n4e8Lm0q9iEk+4mcspjGscbldRxOG+Hr9YwLqQvxPc8K8IIigsmPynpuH8Ip0qD0DP2nWqqA7CeTLy2vXh4pj25Ol4JRSB6c4+J0Epqo/lH6S6iZYCZv0oev5mOXXSBrqjV6QM4lMLg5kiUeUSCSd9psRk4irGyV82JYVQoipeVB7wzYzIMM005szRnkfPFRPzQW2kAfM1Z87So2tswpOe05dPNZqvVnr3lrV2BaiMjJmmirPmqYHseDpkT0dS+mcThNeEE9AnSWGXatqU9M4epXBM7+oOVM4uqG8UjmP95A7bye35lZusmmmmcfrOhpUJwRKKqWYfedfSVYGMQVepXGPiXFG8grXA2lisZMMpkBAkudpoNhDNgZ7So2OOp6SrfeEGwzMXakD0hgPvK6c3msV7ggnORJashh7LPWNsgib+UBsBJAMAAbmZH2iQ2q+UGJ6zHkgbAby3yYyQI5f1l0imdP3xNDUOmJeYbSByFXJhdqL1gOJqbAm2QTM3WYBJlFhYWyUbl98Saajo0W+baFB6y0BsMMwM5umRjYrfSMzucgC53/OWRLwhBZWG/XvLC7gSAuOYAyypQqPmWMsseggqemIwCcR5gBw0bRx+K8Hr165wobHQqGB+4Ox/OUeE+FuGcH0+sXR6ZardYee5xseueUbbD46T0p5Sdo5OYg7Sy1myPzf/ANTPDWq4d4i1PEj6tPq9QwUBCOXAG2wwdu4njdFw3V8T1HkaKhrreUtyr7DvP1fxXQUamo+ZUlnltzKCueU4OSP1nF0nhTh9NXNRpa6TqSGsZUAIAG2369Z3nk4043xc7fm7X8F4nwwVnXaHUacWJzr5iEZXpmUJ+nOIeH14vTq/PdbKtT/C5COiff8AT/8AM+Uce/6UcT0LO/DebVqN/K5cMo/zdDN45y9sZeOx86mJY1Wk1Gjt8vU0WUuRzclilTj7GV5tzIiICIiAiIgIiICIiAiIgIiICJiICIiAiIgIiICIiAiIgZmImYCZRGdwqjJMwOs9Z4Y4C+puWx16mTLKSbaxxuV1Fzwz4dLsruu5wZ9L0OhTT1hVWY4bw9NNSqgdp1q6sCeHPL2r3YYzGcI0r+JKFAEkC4kbE8wEy2jBYWZHSLNSQME7y4Aq18zbmc29w9uB2muoTloWyZLUuSCRIiu4lypNplbwy220r2naTuZWsPWGVextpFz/ADN3BwZCR2BlEnPNefEjJxI2JP2lRpaDqNQqDOBOppKx+JrQe8paGseY7n7Tq8NXzNevwZhXsdBSVRftOoAQJX0ictY+0tHpNs3tT1RIzvOLqG6zs6wjE4d567wuKlZ+srtuZO5kDE9MSNJNOhZ9u07WnQADaUNDSSAcbTqhOVdoRNXLCCVqx8yfOFlRMXCiVb9SQQoG5OMTDMSmQRuDj7iZRDjLEnODgzPZpAlPmEMcj3B3lkBVGBtDHvIg/M59pZNLeUwmwM0AwJnOJplttMFpoWxIzaD0gSMZT1FoGw3krc7jA2EVadAN92z1MCqunZ2BJ9XX7S0KQ2mtXqwwST95rZYtZklTg6a4/wDbByrgBRgTpv8A3A9yBOPXZZZqQmAMAnE6i3c+mCvv2Mkq1WUMzdsSR0OVGTgCWRoioV2bCMcA9d5ztUdZRYQtXMQdz2/KLws5vDsabSc9KMbQpY+kYzkTTVaYovOvrTucSrwy610sc21thwpRh0+06GpqZ6gtbpjO4Hc/M1qaY5mTm9BkNt2zLnkaiqoOUypAOQc7H3nC4hq9Rw9ydTpzZp+pKbnPYidfScUcOFvVjYFB6DYHsR9pnHX1cscu4WitgxI2OxHuJXLpellagjKYw3bJxiQa/idek4ny1oWRmBULvn3ncXyddQ91SfxHPXGDt2M1LKmWNklrn06dKyqKvKqDb595n
VV1snrGSBkN7SRdm6dTvM6gLjncHCDmOJZWMo/Lfj3T8THjPiJ4llrrLTyYOfQPpx8YwJ5YjE/S/EdDwrRanX+ItRpPN1jIXYheZlAGFC/J2HxmfnXjGqv13FtTqtTWKrrLCzIBjl+MT04ZbebPHShEzMTbBERAREQEREBERAREQEREDEREBERAREQEREBERAQJmYxAzETKIXcKOpMDqcD4a2v1i7ZUGfZOA8JTSadfThp5zwZwMVVLYy/PSfQqqeUACeTy57uns8OGpttXUBjaWFWYVdpuNhOWnZE+0hRWZtukmu6STR8neWQvSlqbShC7jMgrrBPMRLuurDNmVUGEwJK1Ojly+ZOuwmiIS0lbYYEkSo7CMSuyyZ5oRvLGarMudpA1Z+0vtWJp5eZRR8omZuRatOcjqJeFYG85vEnLWog233irEukHJR03M7PAavM1nN7TkgcqBRPR+GKTzFsd5lXrKUKoJKek2RdhMWDlUmac3L1r4E4t5zmdTWt1nHtY5Mza3jED9Ziqs2WgY2mpyWxL+krFYyRvLFq/QorQAScHmlZXz1m3ngZHMBtnJ6Qi0DgbCas4J5R6uhBBkHmM5BB5PfB6zKvy7CTsWVGDzMcnrNi64lbnPvAGe81INrLebYCaIWJ6SQIOmJMi4HaNJtHixu02FLk9ZYVRnpJOUASptUOnPc5jy1U7ywSMZBlZ2wYNsWHyyCR3mmqcovMDgdzNb35kZM7qMiRVv+I0YViMnK5/pAhu5tRT5qHPL6XHv8yTTczLcmdmTaQ6W7yy1bjBIwR8y1pXoHmVmpnL7KVOCpz2knLV4VLBZpr1dlbIGCMdpheK20k2q1aoWxysMs3vgdcfM73mV1VGt8BqwPrGc95yOVWtZqa15mOeYL0+BGtNTKXuLdHElfm8oclfQ5U/rkzP4lmwta5A25mmlWm3Bb1H5ltKwOiwnDStWO5GSeu0nClRsSJsq/Ek5ZWaibLLhsMD1BGcyAU1Ld5nIVf3Et8u+CJqUhHMfSU062i9FPMpOGPQfedTSvpKnY5aizqVzs3z8ia8m3SRsm2AMRODLmI/Ppr4hVp7WKi0kIeuT7Zm3ES1FAfGRYeVffPzK+u0nn0J5fKLqXFlRbpzDt/pJr9cV4S91+mYOw5tj9Pz95Z3qplJqWPKeIrGbT6fSrnmtcuR/iA958k8YeBeMNxrU66rR8vDyyKbwAMEgbkDc/efVa1e7VtxTVHmAIWuvrzHOyieluqYO3M27KC3MMgjAyN+3WdMMtdOWeO+H5f4j4P4zw0WvZpGemr6rayGXH9ZwSMHBn6n4roeFVUsClOn1NqkV2bgBsdWA7e8/N/iLgWv4FxN9Pr0TmYl0tqYNXauT6lI7fuO874Zb7cMsdOPEGJtgiIkCIiAiIgIiICImICIiAiIgIiICIiAmZiICZmIgO86vANJ+L4kgxkLOXPbeA9ELdT5hXqZnO6jeE3lp9S4RpV0+kRQMHE7CDpK1C4UCW1nh+7e6fpsJuOk1mScCVUGoMrgvWeYHaSWep8TGqsFdGwGYa0he83ZGMYikgn4lXSuX5iZarGDmZXWuFnAAmjGZzmasYjKM/VNhMTKzUZCu8BMTcYxB2lEL4UfacZc6nXOSDyrOzYPSZS0tIVnbHUzP1rpvybT2XhvT8umViNzPKFMlVA6nee94PUK9Mg+JZ2zenSIwJU1L4XGZbsIAnJ1luCd4rM5c3WWYJnIssycS1qrN+sqVrzHMzHXpvp0w2TvLymV0Xv0m6sC3flBAbHzKiYvtnI2I294Ulj7YJwO2JCGLhSQQAuMH7yQNsY0iYNtN1OZBzdhNwek1pFgSQGQqcyQHeVEyyUHBlYNNg+T1gXFYD5mzWDGOsq+ZgTVrTjaDTa23l6dJX1NpVWdMkYyfiZswKnc5PKMznJqgdQaebvsT7yUSJqS99VhJAbAOf0M2QGt7KME4bIkVlJRWZMBM8wHsZR1fEmo4jp7+YPpwCLU2Bb23
7YmW5Nu3fwq17HIsAayomsr0De5nmtKnE/x93mW8lakcjDIYnvkTpU8a/Ho1WnqIUP1B9OO33PxLmm0+PUSSe5Ma/SzeM5b1VPbjzCSPaXq6guwGJiuuWkSa0zayibSVVxCiSKIZ2wBNsbzOJsB8So1CRynEkCzPLGkVynWalJZKiaFYNqrLmc80vW+pqYg1X4IHse4nWdRiVb6w64xk9R95GpXGo4ejamqtjmrR5YjHfrmY1/Fg17V173cvNuPRWP+4/6TpXXm7QWU0MlF9inDOO49jPE6gfh6X0dIBf6tQ5OSWJzjP3moxea5HEdXbqtS1hdvpPrbqx26ew/8+ZrxjgOk8V+Haq7K1q1GhLk2Ljm5D1xn5GTOlwvQVayy5rA7Vov0IMs2cAAe0341xWrwxw83al10OkcFUpOHtu98DHt9gPedMbXLLrl8G49wduCcSfSNYtgG6uO4+R2O05c6fHeKnjHFLtX5flqxARSckKOmfmcyeiOFIiIQiIgIiICIiAmJmYgIiICZmIgIiICIiAiIgIiZgJ9T/wCn2nC6cPjfE+WDqJ9j8C1hdAp/7Zy81/F28M/J7esSwBIK9usnWeR62wEN9MZmHPoMqqVxK+oCc03vdcARtmdZnBQrKjUIpyJK3GyIqLgSRQQfiVwd85lhDkTK1LNTMiMSxitcTE3Imk2w2B3mczTOI5swsbLUbQxwCAN8zeigLTsNgJtpmA8xfcbyezCUADGcTP1q9INDR5+sHsJ7jRjy6wJ5bhFfK/Mes9F5/KnUTTF54TanUAA7zh6vUjfeSarU5zvOJqbSSQDJVkaW2mx8CWqq8KNpU0ycz5M6h5UUZIHtDVREEjCg4yASB0mj4cqoxgDBPLgkgmTDIV1A+rZiG9jNOXGwmZyMDYbTaANptiaZAcGSKZHibqMYlEymb80iE2BlRJzZjmmkyFLdIG/P+s1D7bHf3mGUqN+plLUWMm4JkIu12jksRtub07zkU0NdxAVcwQtYBk9BNtPxJlc12HY98dJMNbVptV5xqWxlOcHb95GpLOnorOE6dtL5Nljlm2Fw2x+U8RfpBXqbKSRYvTm7MM9p6O/xBTXpQdKvMQDzk78hxtj3nC05e+3zbMcxly18MNztf0VARAAoAA2xtOzSoAlLTKABOhUMAREqZF6SdRI0G0mUQjcCbCAJuo7YlQAzNgu0ATcCVnYOkzj2MziDsYLWp6TQmbnpIz1gaMBIHG0nMhsEiuXq6FuAU7YPMvwZ5luD6/Wao0gmuldiznZj1z87z1WrrUqGY4C5BOcdRieb1fGrFc6LSP6vp8wHfPsIirQfQ+HtFqF0559RXWWezGeXsM/nPmnjLR6nifBdZqNVS3NWvmi587fmfuR/5t9H02i02g0z1ahhfq9SMmuw52+f0n538Tcf4vxHier0+u1dprrudBQGIrUBsYC/lO2E3eHDyXUcCYmZjM7vOREQEREBERARExAREQEREBERAREQEREBERAREQNk3dfuJ9q8Gry8OXbtPi1Qzcn3E+3+EV5eGp9px8/Tv4P7PVJuJMpkSYxNx1nmetJmauTyzPaR2EkYEgqPkdZq7jl37SYgDrKl3U4itxqrczYEspKlAwSe8uIJlalWb4mFG03Am5HKtCJoRiSkTRhKiI9JjM2YbTWRpY0as1xVK3diDsvWbgl1GfsQexm/Cr7NPrkesgN03GQYf++t5jzMbGLH5zvIvx0NGQiCTW6jO2ZRrswuJh32lTTW6077ym/qkrnOYVfiRWlB8s7y9yllHMXDHK8pAx0yDLA4De+jXU89RRlb0ucb4yN5FSGwLHwWZRk/lFll5SWVKfp9ydzNMTJOY7ShiZCzIGZIFzKjTkmeWS8sYlEeJt0M2ImDCE3U8oz1kWd5nBU5B3hEjkuCV69xOTq2JycbS813KDYhwy9R7ylqjVepdDynvJWpHNr9VzbbAb5kl+j1Brr1CjNLnDDuB7yKmiw6rkJXf+bO3/m06
+v1a+TiutQzFQVPde5kxm41bZZI4dwC6p6lcMEPLkDadLRrggTl1VcljjOcMd51tPsBvBa69Ht2l+vqJzqDgCX6z0MrK0u8nXYSujSdTmVEy9Jss0EkUysth0m4moO02WGaziYJ7TbM1zKiNpoeskbpIm6yK1P3kbzdpGxkaVNTULqbK26MCJ5/S8LTT28yIDaT9bj9Z6R+8hFYcNuqso2z3+IOdORrdKh1qOMknGWIyQfbM/L3HH8zj/ELCCObU2Nv/mM/UestavTuyNm5hhAexn56/wCoX4VfEXlUOj3Vpi9lORzZ6fcCdvFeXDy9R5EzEHriJ3cCIiAiIgIiICImDAzMREBERAREQEREBERAREQERMwJNOM6isf9wn3LwsMcNT7T4hoVLayof9wn3Pw6vJw2v7Th5+no8Hb0K7TcNuJAG2meaeZ6osHeQPaFeYFuJDYSzjaNtaYtfJ5pXL8x2mdUCteV7dpFps/URJWos1pjEtIsjQZllF2kjNbBZtiZHSbY2nSOaMiRuJMZGwhUDCaYkrDEjI3mVS6c/wAdAPeTEgW2jAGG6DtK9DclyNjoZPfj+0LlwVzhsGT61Om6v7zLH0yI+0yTkYmhrnebD1bDIBIBPtIz13llTyK1IUc7Ao+D2OCDM2iWnX663TnR+YV05wHXHcHsfYywAFXEhqq5NsSYia/6zx8YEyBGJsolGyiSgTVRN1+YRsBGJkTJhGhmh6TczQ9IGhJ3M1Zz1Bm4Bbp7yKzBPXBECG4q53PK39ZzLBbQxZAWU9Me0l13OpB/lPftGhuqtsCX2BE5hlsdBJ3dNzpWu0usamwjSW7LzM3LjCyhqiNT5IWxjhcOc9fYD4nuLrntcY/u8d+mJ4IvU/EtQaKzVVzkBCOg/wDN/wA4s0Y3a9QoUACdLT9pz6+oEuVNgypXYoO4nQqPScrTkZE6VJhKuJJ1MrodpOsqJgZuDIhNxKylBm4MhE3UwiXMwTia5jOYRhpE3eSNIzIsaHpI2O8kMiaRpExyJTuZkzgZU9ZbfrKzkK6k9Oh+0g4vEXFWkby9mbo3+EdzPh3j7hdy316ymuv8KFIYoNwSep7ntv8AM+38Vq0991lIZhQo9RP9J4rxDwWzW8N1C05atkICkb9J2wuq5eSbj4YZibOpRipBBBwQZrPS8pERAREQEREBMGZmICIiAiIgIiICIiAiIgIiICZHUZmJmB7TgHDdLYtVnpDfvPqHDkWvSqq9AJ8W8O3Xf2hWit6QRtPtPDx/6WvPXE83m4evw8xcDYMFziaMcTXmE4V3So+W3lk1rgGUM5PpkmkvdyyP2ERWbWUuVJmEUZAAkFlbtfz52lqobTNdE9YlhRtIkEnWajlW6iZEATP5ysozNGElI2kTj2hULyMjElIkZkVoCQQfmWNUwTXV5AXmqxseuJjS6b8TYwZgqJu++4HvPWZ4Q/DmsatLQKxnmG4KxrfJ764eW2JmQRn3kVPNZUzqmCclVJ7dhNEsCvWXtQeZnKk7rj3+JLlI6TG3eljlITzQTkMBsARvLOnQhRz7sAATIKa+Z8nlPLkKVzuM/MuqPbtE/aXjhuOs2mAJkCaYJuomBNwJUbATcdJqOs2EIzBgzB6QjUmRsf0m7GQFsQrdWxnEr6g8rg9m/rN+YZkWp9VJH8w3EiztSvuILDbPcdRKZCrWSAAX2H+sluBuAKkqw7yd+B6q+vTvVahLdQxwMdiJJLWtydt24tVVokN6PZbggJzbYnCqHNaXIAaxi5x0E31y20XimxlLklSF7EHEyq4vAXsstu+KuMkm4soNpYq6yBe20sV7GVl0dO3SdOk7CcjTncTqUnpCVfQ7SdTKyHaWFMrKYTYGRgzcHeESAzYGRjpNxKNwdpjmiYJhGc+8jJmxJxNJKsasTIW95M0jMyqF/eVL8FDmW33BlWz7ZhXLtobUNhhhTuSR9RnmfHHE6+B+HtS9Vy13chWs535j0x89/wAp7LVOtelDGzy1U
5YY7T5B/wBSuH38U0w4gHNWn0qsyIx+odz9ztOvj5s24+S6lfJGYsSSSSTkk95rMmYnqeUiIgIiICYxMxAdpjrMzAgIiICIiAiIgIiICIiAiIgIiIHo/CCK/ExnsRPtWmUeSuPafGvBtedfzD3E+zac8qKD7Ty+bt6/B0yyjvK1np6SzccAyk1m85PQs6RlYnmO8nXkVnboDKFZy4wZcsXCge/WS1Ycys2F3k6DEgSsJjllhJlpOkmUSFJMs1GKkA2mZgTbrKy0MjYSUiaNArmRtJmG8haRUTvZVzNVjmZcYPQzpotlnhK/WAhEFnlsh7EkdP1nO69ZHrvxdfCGSq4jTvcjWV+2D9UzZtqdzSHi/Eq+HaSt2YqgcZKjJwJ0uJ10au3hfF9KUBNJ3G62p03HuDOLqtI2s17Ui1L6VAPMu4P/ADOvo+G10qMCTXtNN79bK6NdeFQgY2EnVSO01r2UD2kwM6OdrAHxMgRMysg6zcCa95sDDLYTPaYiFZzNSYJmhaBhj2kLTdjmRucggGRUbn07SO5uhB3mQ2cqw26GV7S1ZxjnXOJNtIkdKbHewHAYcg7Ek9/jE79xa1lfKqo9WQe3+05f9iajVAXBkSo7DmO+3fEpazh2tr01lVFwsAGeUHBHzN48RjKS1zuL3JfxBNTUOVHbHJ3x7zGk9dljZz0Eocvk6lUcEMBk5nQ4bgo7E9WM575dtai6BJEmCPabqPaac6t0bTo0NsJzKuo3nRpMqOhWdpYUypWdpYQ9JUWAZupzIQ2JIDkQiUH5m0jE3BhG4MdZgdIBOIA9JqZsTNTINTIz95J2kZEioX2zKlnSW3EqW9DIrlaxW1IX0/wwwB+T2/KfJf8AqvqdVY1FFfMulrJNmOhbtn7T7JfYqaMDl9YOc+0+W/8AUe7R6bg7tfYDfevJVVjqff8ALrO/i7cfL0+MxBiel5SIiAiIgIiICY6REBERAREQEREBERAREQEREBAiIHs/A9XNqVbHefXacFAJ8t8CV5ZT8z6jTss8nl/s9vhn4s3LhTOey81mB3nQtPoM5yuFuH3nJ1XtPpCMMdpvdz88si0GsbDpKrW8zADrnEVvFumR16yZRIlk6zBUiSdZCsmUzUZqQCbTCmbTTLQyNpIZG0ghYyEyVhvNCu8K0A2m2rS7V8Ns3Wta+VckY5/jMz2JnV02mps8McQcmzzNnA/lPKYs3NRZl62VQ0WlSmlcLg43l0DaQLbzDPYiSq20Tou9pVMkBkIM3DSolBzMyMGbAyo2zNsiaAzMI3zNS0xmaFpBsWmheaM00LyLpMBneQORnHQ9puCSo/aVtWTjmByR1H+sVZG7LzYYbHErpd5VjWkDNXqwe5kiWgpnqCMyzXwq3XaPmPPWpGQ+Nid/2iTa9duzqXXUCm6p0ZLUDAKwOPj8p58P5fEbWYt6UwfzlKrh3EKdQaSBWy7CwH0tn2MvMr2Vo2QCygEt95u3aYyT64lmjr1eodhswGAwlmjT6auhq6GL8nV+xMcQ1fCeF6cpfZbdYfU1dZxzfc9hItNqW1enUV1pTURkIv8AqZz6dO09eSmT7yVYCgKAJlZqMVPX1E6FPaUK+sv1DpDK5X0lhJBX0k6yolWTL0kKyVTCN1m81G0yIG46TPQTWZhGdpg9YiFaGRtJDI26SCF5Uu+lhLTdDKt2SDJVcvW3V6fSszOFI3JPt3nxDUcH4t438T6i+0PXo67ClZYHlRM9vvPuHFdJTqeHnzQTysMleuO4nNWhbafJqr5EB2FYwPznXDLUcs8d3l8u4/4I4fpeFY0gYW17tYe+3afM2GGIPUT9K67gZu0rhj1G2Z8V8R+Ddbwp7tQ1lLIzkrWr5bl953wy+OOeN7eTiInRyIiICYgxAZiIgIiICIiAiIgIiICIiAiIgIiZgfRvAifw1M+j1nafP/Ay4oU/E98pwJ4vLfye/wAX9WupflWV6KDb6u031JJBm/Drgisp3mI6LgUKgHxK6gNcc
DGJOzjBzIkwRnueslbkTL0kyyJJMskZqRZKsjWSgSokWbTUTcSstSJGwkpG00IgQsJGw2kzCRMPTCq+s1H4BUe2vIJB5ScZEu6nx1p2ts0Ol4Mw8yrKrzYDDlxkD7/riZ4jp6tVoPxldleoFFDI1Ngxj4/eef4Xwu3+2VatGusp0gZa84O3UCTLK48RcMccub8dTSs61qlgIcAZEuo+04PC9br9ddZqNc3q+lFx9I9p2UO0TpvOaq0Hm6nMgQyZZpzSibiaqJIOkIATMzMQjQyNjJGEjbvI0jJJmoz+c2baABy57SKkrVXTkPeUtW1lIDbhlOCfcToVchHqkOttQMFbGMd+8tSdqdLU2HYFQPU4Ht3noqtTpLaKadLeLABjkLeoGefaofh2ZFCljvgfyiZ4fw3V/iVvQAhPUFzgt7TWFM5NOrrctStRbL84yo6jacZrylhrNDuV3IU74lfW6izV63zKRbQKzj1HDFu+ZIVflsWti1oQb9yepjLtrHHU5cTX8N0Wvsss0t7U32blL9gT8GdDR0W6ULXavLsPt+so8WTk4jWVXCuFf9Z3aSraM8/TGR8TDfw7fGYA6Qn0AH2mQJqOdS155hL9PQSlX1l2roJWV2uTiV07SdYRMskWRLJVhEgm4kazeBtMzAiEZmImIVqZG8lMiYyCFumZXt6SdzK9nQzNVWeoPgE4+Cesi1Opq0GmBKfAAExr7m09AtC83KPp955bi/iHXabiLaZNPSFCA8xy3Ue06Ys5LGt1V9lb3NhQN8dhPnPEbLdbqbmYYL7Y6bT0eq4tq9anI7Iqg4wi8oMpW6PydO+puArrC7u+wE64uWVfI9bQdNq7KjjKntK8vcW1FWp4jbZTjyydj7/Moz0PMGYmYgYiZmMwMzERAREQEREBERAREQEREBERAzMDrGZld2ED6l4LTGkU/AnuE+meL8H+nSL/AJRPXiw4nh8n9nv8f9YkZBaeWavom0rCwN6D1mNPbi/c7GdDVNz1co6ESfHSdue1jOwUdO8sIMYAlI863hR3lxNsCc63VhZKsiSSqJYzUyiSqJGgkqrKiRRvNwIUSQCVlGRIm3k7CQt1gQt0kbbLJj+8ra3TW2aF7arFQjbJhUTcM1vFKimjsqWlbUGoDvglc56d98THiLhF2kbT8X02qNTpYtVlSPg8p6dJwLrtVpdVXWupbNp/iNX2wOoBlXgmmv4rqQbbm8us+acnYLntn+kzbjfjeOF3vfD01Cqn09JaQypTZVZY3k2eYg7y2g2momXadO0tIJXrHSWUlYSqJt0E1E2zAzBmAZjrJsYMjabscSMn2kVr1+0WNy0kwDhhk4zNS43RtweuYVvSear7GVyV1dhRtwf2k9aFNLdgEkHbAzKrVHSUvYv145R9+pgkdnR6M06YX5Qq4wijflHz95I1h01otTlY0gnrtjHT+k4ug4rZwui4NWLK9QcBcbjHUia6vXW6+hK0qNI5svg/UPadJZJwz625cqunYvZfqLWDgszEjoWP/M0BaphYWw2M/fMkWjylSnJA6kdph6lst8tXGdzic9/t34a690sqSyyhLCg5gxyMfEkRi9S82AMZCgYlbVMDTpkU5qLHn27iWV3Vce0IlHXpNsTA7TYCajnUlexl6qUkG4l2odJWVyvpJlkFcnWGUqyRZEpkgMKlE3EjBm4hG0zNQZmQIJxEwesDVpG0kkTGBE0rv0PvJ26yJhkTNVzuK3PpuHG6vHMp/m2nzni3ERqrGKHmcnD2DofYD4n0rigrbhVvmqpQAcwbpPCcT0IoH4nTaRLdP1Jp+pf950wrOTl8KVLNYnPuo6gieM/6i8dfVccu4dSzLp6CAyg4BOM9Pznv9EtKMNQnP6uit1E+U+NSr+LNY6HIblJ+/KJ38fbz+TiPPxETs4kREBExEBERAREQEREBERAREQEREBERATZPrH3ms2r/ALxfvA+r+E1/9CuPYT1SqeXeeX8Knl0C/YT0RvPJgTxZ/
wBn0MP6xtUGN+R0nUOQgz2lfh1QC853z7y+5VUY7ETLcc2xc2q4lhBvK1douY8uMA5ltJzrdSqJMg3kaCTKJWUqCToJEgk6wiRRibDpMDrMyo1bpIWOTJjIXEohYyTSavTaey2rXUtZo70K2FBlkPYiRN3JMo6q5iproKm5h6ATjJlSzc5QvTpbbqjp2S1alc8wO4zsFYe/WcApqU4I1OlBqZ8qzk/Up6Y/Sei8JcD1y6Zl1SY1F1jPblh6TjYTmajhn9l8et0w1BuodRYgLZC/H7zl63d/TphlPqTgWk/B6Faz17zt1naU69pbrnSdJld1Zr6SwsrpJ1OJWEynabdJGpm4ktGZjMziYMitTNCDnM3Iz17TSxDglNyO0KisPMPntIy2cOd/eC/K3OOo6iZuTbnr6HfHtCt6+eyxa0YguQMidnWcI04Xyud+bGBzANKXCqaX12np1FwoDZdmLBScdFyfedzV6nTrrHVdZSTjc8wIH5+86YzjbnllrLTyvEuH1afU0NUGI7kk8oH298ybyiV9AH3M21+vp1L/AIehHuevK8oGxPbeR0JqKagtlTcwG7DoZl0nSCyoHLE5c75Mo1o9erW1v7tDlj8S/e7oRzqlfy3ec+5b7nULYOXpkHp+UmmpUCvVXdqqVtWxbCDWo3K/f2lurZAM9BOJpyBxSxe/Ng/kZ2kMirC7yQe0iU9JIo3ljnU9Y3lurtKtctVTTFW0kymV0kywiZTJFkKmSqYVIDJAZEDNwZESDpE1zM9oGczH3iYMDVpGZsfmamFRNIiNpM3QyEiTQ5/G6zbwTVVqASU2B7mfPLdY/CGFus1VekpHUk45p9I4mVGhcOcKcAz8/f8AU3UWHjlWmDHykTmUY9zOvhm3LyZam3oON/8AUbhK1MukrOquxgMF5R+s+WazVWa3V2am0gvYcnAwJBmJ6scZOnmyyuXZETErLMxEQEREBERAREQEREBERAREQEREBERAyFLEAbk9p0uHcOe69fMRgM7ZlXRFRqk5umZ9B4UlFilGQD2Imc7p0wx27PBNMKNGqgg7Tq4wJX0mmWurCNtLWfTymeW3l7MeI63D1zp8gzXitvkaJz3I6TThtnoIMn4jpfxVIU/T3mK6YuTwYN+DDP8AU25nXrGBK9VIqprQLiWVmGqmWToJCksJDCVBiTqJCkmEqN5nEwJtA0PSQv0IkxkNm2cy7EIQO3Izcue88xravK1zXLe4apsjG2CO/wCc9HaCQeU4PY+0g4rp9PxDhWu1tYXT6pKgr1E5DnoWX2kst6JdXl5HWcZ4nqdJXY+pZXssCDk2BHY7S5pdJdpbhZqdQj3uAcDriXNRwbTf2Fp7ayzW1WIeXsCdsTjafSageIbbNQW5gAOU9pm271XbGT1tj1CDBluuVK95bSa25J0k69JAkmWESqZIsiXpJVhW2IxvN1GTMP8AwlJO69/iNIiZWU8wHMO+JAX5X5lOVM2FwV+ZW29pmxFuyR6SehhZ/tqypeu45WPf/ea6St2vCsNl3yO/xKxayq3yyNydvYzu8Gepzqk5eZ0XIce8uM3Vt1NoX4KdZd5mpbynbcY3CgdjKXEOGNpStYPofdWHtO4HWs/wy+eX1s52zOZrtR+M1FaVZBVQSxO01eEm730r6PSLpK2ZclrDnJ7yM32m5ucNyhvTg9J0EChguQcAY7ESDUaYrYzKAQd9pFmmmorrv0rLZYqL/KWPQzi3216Cssredb0XA9IMtahGf63CIO7HE5V7LaBy55V6ZHWFjnaNy3FAzdSGJ+879U49VQTVqw7nrOxRsczNaqyvSTKM7zRBviTATUc6kQdJZSQIJYSVhOkmWQpJlgSCbiaL0m4MCQGSAyIGbgyCQGZmo6zOYRneYiYMDUzQmbGamFRneaNJT0kfWSjj+Ib1o4TZY2SoIJwpO32E+Af9QuKaLinEtM+kvS0ohVyAQRv03n6F4kCVQKSN8z5r4x8G6HiOqF5Xy7mG71jGf
vO3iuq4+SWx8Sid7jnhi7gymzzksqz26icGeqXbzWa4ImJmEYiJmBiIiAiIgIiICIiAiIgIiICIiAiIgZBxO/wfjVlNyV2N3wDOHQivcqt0JnqtLwPTWOm4DfEzldN4y/HveHX+dpVbvidBF85wud5y+GaZtNplTmyAJ1aXxZ0yZ5b29uP+3S0+nFJCk4f2l6ywLXkzl6bzH1PrJ6zqGtGHr6TFdeIrH1Vq+O83QZ3k9yAKEQbCRUjOR7GYPiVBJ1EjVZOqwykUdJKs0UdJIsI2HSZIgTPYyiNtpXfeWH6Ss43gQtIRpzqb1qQZZiBtJmlS/mVmdCASuDkSy/s1vp0OK6GlvCnEq6rxRetTY5/qb4x2ORsZ4/gptu0qXahmstZQGZupxtM6vT3XWFrbnY4xgnaXNFUKqlQdBJllurjj64626FXSWklesdJZQdZBMslWaKvSShcCBusnTrj85Apkytg57SidcDAPT3mhbmYgHDLsRDeoc1ZB9we8iKhhgsQQPS3dT7H4lZRajTB8spCt39jI6nNe1mR3BkzuwU8+D7OOk1tKiou5BQdh3kbi9puHrrdI/JaouYenb6R7yseH2cP0VjFybiCv8In056fvLfh2xTXe7IBbe2EYew7fEtaq38Np7L7eRfLU5xv+s6a42zu+2nEZ9Rqq0TVWfxKh9OPy395lKvJVQpyzjb4E207ae5g+H5htzHbm+JeFYfJKBWI95jt0t1wiC5UMT6htn3lHiOptprK1sFPuJ0QpUHrmc3iNeQF6wk7cWtWu5nclm92mjqAuMy0FWpG5zgCUGuBfA9QJ2xEaYVP4yffpOjUOk5p5l1VfNy7sQBmdOmKVarHvJ0G+ZCssJ0ErlUyiTJIlkyCVlKslWRDpJFOYRKJusjWbjrIqRZuJGJIO2IG46TM1mQYG2ZjtEQjUia43m0YgRnpI26SUiaHEDlcSuWt05lJHX7TjcWqGp0L2IQSg5hiW+O6haddRzByOQ7AZBGe8oWvTfS4qdkDjByvSbwZynD4T4t4kdRr30yE8lZ9XyZ5ues8b8EGg4m+o07iymxsED+Uzyc9eN3Hjy7YiIlQiIgIiICIiAiIgIiICIiTYREShERAREQAJByJ6Xw7r7bNYK3IM81Ov4eQniKsO0zl01j2+qad80KRttOhoXU2nnAB7Gcugk1qO+J1NFQ9r7dR7zyV7sXZrtrsf04DCSgE2ZP0jec5NMy60Xox5TsQZ0QdsTG3VaTD7ncCQpWKnbY8pORJKjzOEWXLkHIARkSdp0rqu2RJVG8woCribJIiQTcCagDE3WBsJmAJsFlREwlewby24layBVfvKl3SXHlO3fIkWOZqMZEmpX0D3kN/98AJarGAIWrdS7D3lutN5XqEv6dQzdN/eETU0EjcQ6Kg3xkGWkHKAeo6SDWJstgHpJwfvNWM75Vm26dMzdbex6zBAIEjxzfB6SNRutjVNzAZQn1D2ktpDqfUT7GQLaUJVhhu4PeSpp7ra/Mprfyh9RAOB+cQvDRFYvyMwKn+b3nU0PB6tTpqtS9nNWTtUBtttvPNa7iKaW6pT5nks3VVJyfmep4ZxrSarR1U24091Sg7nCsP95vGS9p5NycOVr+DWaYl9GS1TNgLk5Ejstv0ujs0llaMbetjHPLOtbxHT6qt69JzMVGWJGBOW4fWVkE55W/UReOmsN3tZo0ysFCqML0lwV4G+8joHIgXuBJgMdZIl7Am2/aU9dSfRmtyrNjKjMtWXBCA2QuRnHtLq26ZmJQlq8ekjvNTHaXL1ec1mn0tS2I6/w22I6meT4jVXwzWLp6ndgUFgGd13x+k9nxBqK9fl+c89bEhcH1D6TPB+JWva2u2+yqpWHleaoOw64ls4XDLljSXNZrqH35XJIz3+Z6KobzzXBnrD+Xkc6vlR1GMdRPTVdMTne3SradZYrlavaWkiOVToJMshWTL2mmUgkiyNZKsI3E3A3E1WbjrA26TYGa9ZkSKkmRNRN
hAzMxGIRgCZiJRGcSMyRpGZKPK8e5m4ioXJAQf6znvlNO3OCD2kviHVGvX32JbyGtB2z2nmRxzVauhqrChwdmA3xOmMZyrg+IKl1OlvrOApB3JnyxhhiMg49p9F8Xa86ThvlggWXHl2P69586PWenDp5c+2ImZiaYIiICIiAiIgIiICIiAiIjQREQEREBESbT1G64IN4o20lA1GoWv3nseCcEOmvW1TzL33kXDeHaNsBhh13DYnpNJo2p3rbmUzjnk74YOzo1HMD3E7enw1hZFwpGRjtOPpkLbjqBLGlusr1BU82/ScK9UdQBkO24JkxIAB7zVA3MucFMbj2MYCkkn0jec67Tpf0g8tedvqMuo3Pv2nNrs5gBLtLbgZljFbX0hULLsxkdLZOD17y5dgqM4x7SBdPyjnA3PaZpOm4GekkWRVEk4xLKiErIEyR3meswcCVlG0rW7Sy5646yrZ0hVWzvKtm2ZZsO8rms28yiK1HMrrN2pwOxnVWlK8AfvLGh0i1Bthuu+febamtRuNjtkRpLdtK1DMcCW0zXuDsN8yppwoHNknJlwONxntCLtbB1wRsZpzB/MpfqO3uPeQLZyIcdBMc4d1dSAwH6ia2mkVmabCG3HY+8jc/wCDfJ6Sy7BzgDOeoPaV6AHvFWnxZYxx/lEy22tZPJzY/TsBvO54d4xnhN+nIVQp9DDvnrmQXcIpNPKRu6FVYnqfecfT8Tu0PNp6dNQwT+GxyTkjqfvOmP41jKTOadXii+dor1rZQdjzgfkf2nEprHN+HYANtuTsPzl/X6l9StJoKoj+pwp6AdSZvbphZVyruD295i9umPE5Q6cLX6aDzdmJ7y7ptOlbc6k7ncEylo1auzkPY4/KdlVATrEMrpgIPfHxM4z1kbNy2Ioz6jv7D5l46cJ5gCmwLsze01GLddqjVedmtccxH6TmG7XOw02jsQio5YNgcqnv8iWadWiagIlVz2YJDBdpzeIqVWzVB7KFrXYVLkjfq0f8bk/bnLx3T2vqV1T1i6k4FiAkWCcHiOrq1la00nCkl+exdgfYTp8R8P1UVW6uy8h3BY8qhQBjrj7zyOkay7BYkEbqB3+Y3Z21MZeYn4WM8V0zMTzKSuJ7OveeX0lWNXTay4JPvPT1HpMXsyXE2lhJWrO0spLHKrCSZRIUkqmaZSiSiRKZKsDcTcbCaCbiEbDebTUTYdZBuJsJqBNxA2AiIlCYJgmakyI1MjbabkyKw7EwPm3irUq1+prGxL9fieb0gI2YYz3l/wAQ6pn1b1tSVtLnlJPUZ/ftOTr9VXw/hj32OQwT6c9/brO+M4cs7y8N4r1p1XF3rDZSr0jfv37zgyW+5r7ntf6nYkyKd489MxESoGIiAiIgIiICIiAiIgIiICIiAiIgJY0Vgq1SMdhnErzatS7hV6kxVj2WiuFVwLfSdjPWcP8AoBU5B6TzXBuH22adV1G5HQz1/DdF5QxvgTz516cJXQoIrIb4wZvo2P4whyMA7EyRaFasZ7yN6wLAAcHvOO3okdYM2ptY0gBlBJHvN0UtUOdcBx0lCnWnSqEQZcy3q9RZ5dViKCpGdu0l/bc/SWk8h5e/SdDTn3nK0162jmOzToUNitQe8wuUdDPPYFPQSZmCrgDeVqm5VLEyVDzEkysabKmwPeb532mC2B7TCsD0O8glDbTDHbMwCFG5kd1i4OJqMtLH6ys7Zmt1rdukgewYOTDWmtrgZmtdmCB02miBixLbjtNWsVXwNidoVe01u2fvNtZ6ioPtK1TDlYZ6Se0G2gMu7L+8VNIq2wu3brNq7clievtIa2AJwOsw45Lly3Kp7mRrS4LhgjsR2mNO6V24uYLXkDJ6nMu8G0Cai6132RByrk/U0r8R4OljotRrpsqsxYWbdge33zLr6m5vTr6zhunqp8xjmvAGxxjPeeYtu/A+Iuc28lY5OYoPcbjE9FoWr4kH0NNh5kUApzdhK/EOFV6wuyOKrOUcxYbNjoZvKcbjO
GWrqrlmv4a6PZptQ1lqj0ggjJ/OcizSNXzXPjnsyzfczWmg6VijsHwcZA6/Il+9hYh05G7KcH8pm3bUnr0o8lFlfp2U7ZB6y4lm4yxA6Tm6NGVbVYZAXP5iX9EUurGRiSNZRb5a2PMOvuJoutR7H07KVdlIrPXJlny1rUEY3lDUaYvaGQkMNwekrM1VvTvXp6Ut81vOJ/u8dBOtXfo/qZX5iTz8p2xPIX6nUgPS9rAt9RPX8jOnw/VctPM7uGZcMoGciWZQy8d014wW0fE69Vojy6axQpUHoRn9Np09HwqrivDRZXcGDr61UgkHuGzOPxC1Tp2yospLYRQMEHHQ+4nA/CaivzbWd6Wc5bDEA5+I9sZeV9MrjxdKHHK9U/En0DWZpq/vK1IIz2BP2nJvddLdhQrgdh8zu65K9JpmCIFAG5xuT8zzFSNZmxiSc5PyZi88us4mlnQ8x16h2ZgDhcnp8T1VR6Ty2kP/AK2s9uYCeop3A9orFXEO0soZWSWK5Y51ZSTLIEkyyspVkqyFZMsqJBJBIx0m4gbCbiaibiRGw6TIM1mwIgbAzGYmCYAman4gmYgYYyvqG5aHPsJOTOfxWw18OuYHfk2geR1nDm1mmsotfr9NgUZT2M+MeMOB8Y4NrR+PufUadzmq4fSfgjsZ9rrtLLyvnm75lfi/CtPxjgmp0DgfxEwpP8p7H9Z2wy1XPPD2j85zM6vGfD2v4HqWq1VeVBwLE3U/7TlHrPTLt5bNMREQhERAREQEREBERAREQEREBERAREQE7nBeD2aq1LQdhvic7Q6db7PV9I6z23BKqqqwivuvT7TGV1G8Md12tDpHrABInotMCqjABE5WndThSQczq1oAoIaee168YmK5XGSJhU82wDv8zFrMg6STQubnIxvOenacRV1FTJduDkb5nSTVLbTVUqjLA7j3h1eu4cwBHSVDRSdUtiFq8NzbHb9IN7XdHpHWxhcwBPqTHcS/pba7HNRPqXP5yhezLdUQhZObfHYHrLI01dN3OxKlXGGHcGSxqf7dK1WXlGDyAfqZIloGARiUdH+NteysDm5WwSTtLV1bJSHsGB0JEmi66TG1dsiY5+U/Epm5SwCtnImA5ZuTMJpas1IxkmQm7K5MpXGzzVUnCZ6yPUakVuwJ6bCWJpZt1ArXJMpLqPMYsfpzt95z77nts8sH5PwJuLQEAXoNhG106PnZAkBbm1i56IP3kRs3AHtvN9Gov1IrewV8wLZO8Ui0jeWxDDHMMgnvLtFOpdHetCQBze0r6hCloqN9Qq28pnHUe/xPTXagCuqobBQB5inAxjcyyftnK604vDWTWa86dE/ishOS2MSbimg09OicMX50oZ/MBzzY+PvKKaqiji7au0cvMvOFBxknbaY4lxJbazTSrBbhyMWO4+0caNW5cI+C8WOnstr1LWYwOUIe/Y/pLHG9XQ48vAYlSeuSrEdfy/1lbSaUUZboMk5PWdDhPDaLa7b7kVy55UNh226zU60uWpl7OVwbidfCtUrWK/rQgEd56YcSqtqLJeOby8jI+ke33nD4zRp7xRY3MtygryqNiO35zbS1eaic6FMD1D+n5ye2uC4+35Vft1As/i8oznCkDGfmTri4869cECUbg7NzEYA+kDtN6hnDA4+0jWuF2mgIjFu/UzUPSjKoz98bTak5tSqwseY9J2DpKLdN5BT0D36yybYuWu1ek6c0/wAZwAc+otgCRpQ1ZZqyWCsBlmGw/wBZT4Ddp/x+vSwbJZyqH3wBmdE6IX1WX03gqwxWqjAxnr8x3CzV043E9La15uKlQmx+cRo9Ua6bbjhSw+jkzt8S/q7bLM1ONwOnxIW8rQ1M1m9zj0VMNj9/iYdJeGWWlGrtIYZHpU/zGcviHmarVV1k7BhhR0WTtczW18+Ccbn2+0ytfNqGsA2BwI7OuXE8RIfLAXrY4H6CcBqBUrKvRRPZcS0ouZX5c8mQo+TPM8Tq8mo7YL4lWXhytGCdXR8MJ
62kTzGjH/rKABsTmeooGwkvaVaQSykroJZrljnVhJKsiQSVZUbrJlkSyVesMpVm6zRZIIGwmwmgmwhGwmZiZgMzBMzMQMEzGYPSYzCNWO05PGrOXh7gnGSBmdRuk4PiN1/s8KxADWAbwrlVgseUjbsZOxVVJxjHUzhLxN+Hnk1SHyeotBziW7+KV/gXuqtrtQA4ZTNyFscHxfq9I3D7LgQzAHKsv1CfE3PM5IGMnOPaen8Ucct1eqtpLDY/yHpPLnrPThNR487usRETbBERAREQEREBERAREQEREbCIiAjETp6PhY1KglsE/Mlulk2xwVXfWhQMqes+haWjTEAGvGBsRPOcJ4amkclm3z1nqNOFCAZzOWd27eOaXtDoFd8C0gA5nbGnasDJyPcTkaIP5ma1YjvidivUNzBScj2nGvRi1uUMCM7zucF4eLNC9zcpY7qft1E4dxrcjflzsTPSaZG0mjTTk55RnmHQyY9tZdcIbqcliADyjfE5F4AVOQAnGTidHUeb5dnK/JnqZxKi4sLE57YkrpjOFymxgVO+3vLV+qCU2ADJI3lTmAOZY4dWut1oQgYCknPxIvXL0ekvqHCqLVoKuyesg9SJz9XrV1brpk9bnBIB7d5ZtfFflqSq9uXtOPRw2vS6z8TSzcwzsTnY9ZbXPGTmruqqRajUuFYeqt8/liWdPwzyNFyauwu5OzId1nP1LVWHlsQtuDnPsZYXXhiqqpA779BI1d64RWaa2vVGs2c1JGUY9ZT1i6ZATax51Kmth/MD1EmGoNl7Wb4zyrOfaFs1Nemcq1Teg77g5ztDU39YegJzOK3ZW/mA2M10lPnEXKjBFPKR8zq6leSpDTzotYx74x0lPhOt/G3OteWeoHmJGFLdhGmd7SrUo1Lmxa3q5uUA9RNOHaf8OtepIHqAUNY2+DsD+s6I0VdXC77dbX5lhVhYa8nAzsVHvJOStV0Tglm0tb1uv+IKAQcH8osJd8ONrj+Kfyh5dihMBs7KxPTadWi3U6fR0U2YudEwAduQf6mRJw+unjOrsC4rt5bVHsG3x+s6RA2AUbjcESRu64cirT26i422AdMDPtLiaWtCLGQFx0OJYsUV4wAAZY4c4OsywyFBOMyyM5X65XEX1GnRG8k8rA4IHT7yPh3EzRpn5yHJOEpJOR7mejJp1HmqU9BGGV+hnL1/D/L1DW1Gv6d+QYBl0ksvFc86i3W63lTmWsLkgjCridAAkKEOFXcL7n3kDny6kKj0gZI9z7ydbF5A6jaZjdWK8OpVh+RlnS6apbSH7DKg9PvIKcOwxuZvY1tLh+RjnowGcSxi7vDqGkPahTlFhGA+N1lvyb7NAa9Pcou6LYRnP3E81fxu5NZpq66dgfWR3J6bT2Wh09xcCynlUjPN2nSavTjnvGS14Ii5b7QuEfzG8x0OSzZ33nR4Y99Z8qqw4Y55euJLr9KF4zrFUoKufm5h0APb7wbDpWC0gCp9mbuZy64encyiT8SqNZzeqzGC56AzlXPZrh6s+algzn2lshQW5PuR8yuu2oZydioz95NEQ3Pi9O4z/rLaYVHYnPr7SpcM3AYltl/guVO4IliXpzeLao01K4t5VYhR9zPN8T1S625Kk6V9TnYmdjjyi3RVIG5bA+Qczhvwm3ksCWiu0VlwjDOQPaF1waOprtYt4GFQ4x/SekqG085wjVmyw04GS4YfYr/uJ6WsReyrFYlhBIE6ywgiOdTr2kokSiTLKjcCSLIxJBCJVm4kayQQjYTYdZqJsIRkdZmYBmekDPaYMxnMZgamambHpNGMCNjPM+KgtunorZiBzltvtPRucCeT8S6jl1dCYyvKSYg4opynlu/OmP5p5nxPw78Dw+3UaYknGSqnGfynrVCsvMvqXv7ieV8WaytNFbUSVPKcETrhvbGfT5Pa/O7N7nPXM0mWOTk95iep4yIiAiIgIiICIiAiIgIiICIiQIiI0E9Lwv8AiVIwPQTzU6HDL3rdlViO4krWP
b1NKMblHbM9DRWFUCea0OuVrFD7NPQ03ADr9pxyejHT1XCXXT6dsKOftmLua+8W1qFI+rE5On1bOVVQZ1tJaPKsycH2Mxa64zSlfY1dgWwYye3Qzs6HXak6VUAHKmwJnJuUWOARsN509OhARR9G2Zh0T2/iNQCtboR3GZXXSuj/AMUhe/WWTU1V3OoG223eR6h1YKShJHSRqVlUQLhRke8aNjVqhai+pTjbvKq3GzYKRvLWkIDkZ9XWFdJtTz5K/niQpeEwX6e82uXkYgryt3nKsuIt8tyBv+0VJF2x2sfCA/Jki1ZqHlsA5I5ix7SKtQFsr3QkbN7zGmuf1KTzv3IGwEjRbpwNcnPW71NgMEPabvpdPUwsqUsEbYnciaG5jc9TkhezA7zGps5NNnOAvU+/3hLvpZN5F3KCCjdcx4etr1XEdRVUnMhVmrb/AAkdZy21XOoCj1EYzOl4fFf4/Kk+fVgpWDjPvLO2cpqPQFqbGsFWE5RggHbmxONqia7LG5Hu1VyCoInRc/V+wE6etFS32eWfKtb1FFbc4nB4lbqvxdCUnlttOw9l9/uZrPlnxTVWNNajcU1KAscKBv8AEvcxLhyTjticlRdpUZgMl2y7dSPiWK7mYeonBnOO1dWqg61xUhJbrgSHiOgu0tFoosCvXg5brnP/AOJnhOsFGpcmwIpTfaS8T1iX8w07C97E5WIP0j/eb+Oc37aczTa/X16mwXlWutQc3TBHbH7y7UzBjW55icsx+/aU0p8kPkc1pADN7SzpAz4BUkDYNJu1qyN/IJHJUOYe3tITU2jOLRit9gfaXtGQWtUKQVOCSNjMcVt0/l16dyDZaDy79Md/iNMy3elOzWLpDVzLYGZwgKrkZ+faekuai3RvqOYNWFyVBwc/lOdw/htfEdIy33OBS/0oR7bZlknT8J4g5sYItoHKrdBt0GJZv6Wy9duA9XPeXQlSGyu+4HadrQcS4m9dlFuoIRv5iNwPj2kOrRdfaNXTha1BHMwwCZXGpNli8qlApwR/iPuZn2svDVky7T3jNvlqCleMj5+ZKEzUw3yR98TashqiCep2kauRYUOMnpGk2h0tnOzZ6gYMjJ5aec9nwZNXVyai7GeVjkfpIrEZtM6Y3yGEml7VnZGZuUjY7yvqNU9OmIzgcxCn3OAf0H+00utFY+nfHc4Eqai2vUZ5C+aqCFX3bc/6Q3IUadte3OASCeb1jf747S3qUpRbNfp1DXaeoq1bHHMOs04PdTp67tW1mVtVS4JwBgdpY4m1N3CLV01aFtSVXn6cxP8AxN4zhzyyvtp4fgdltnGMWryMHYsuMYzvie3Seb5dMfFoOmYsVr5bj25xt/SejToJnLtb0sL1lhDnErLLKRGKsLJR2kKmSg9IRIJIDI1m4hEgM3BkYm4O8okHSbTQTaEbTM1jMiNu01zM7e01JlCRsZsTNGO0ghc7GfPfGFqni1asbKwtYAtXscz6BafSZ888V/iqdf8AiK1FunbCWKf5fmax7S9PN6zjmu4HT5tyrbURtYg6zwXHOPvxawsFKg9ftPccQsFOjsR0FmmsG6ZyVM+X3AC5wv0hjj7T1YPN5KjiInRyIiICIiAiIgIiICIiAiIkCIiXYRERAnQ4ZTzOXPQTnzpcOuABrIO56yVrHt39FQtz4A2HcT0em0gVlyTt7zmcKCopAG5nfqY7EDpOGT0YuhoaAM5GJaOAfT1MpLqMd8GWqrPNQnGCOs511g3MBt9U6WgywAJyQJzDaAQOplvhtp885A37SN/HS1AJHoffPSUnZlO+ce0s6l0QHkBzOXbc+QCx/KStRKLMqdiAJNSUJyG3lVXJzk5kyKAoYnHtiRpcs1dj01KG52z16nEh1FCW5HL6lPXv+UlFIqxYmSzCZttCuiqM2NsF7kwkqGyw11VqtrK2NnJ2U/MsaDUVrdcarfXbnm226TTUaHnqVXIXI5iD0P5zn8v4biCov92pzgHI3+Ya4sWdbcK7+foMb
79cyfhH/r7mtsBKVD6cZDH5lLXVG3lrU5IGQw6ETq+HEWrRaigsy3u/0/lsREm6zldRpreHpz1tp6ih5sll6e+4/wBpSbUtpOIJrabAjWVnIHTInoda9aXpWAy830jPSed4pozqnY0L5SpaN/uNx/rLZpMbucuynFl1Gm8/UPUlwXlwAcke85td9p4pVxLULhOQIq5+kfMgGlCJvduOg5ZaNor0SmwcwYdB0MzbWpJF8KpJUMCHBweuZJptCz1s7BhydRjoJzdYCmgDIjjlOcHbG06FHEqP7NoNmq5LCOV0Ub4HvLIlt+J9Vw9CocXYLH6FHb5MzptMlRRFAAyCfn7yknELHPl82Rz5LdgJYW0lucPj2hdVcspYZx06zWgnT5Uk8rHIB95gakOo5myfiQanSajVlLaLFVq89TjP2jekkvVdihqj6uU7nB5RvmWLuA6fVW137rYoxzY6DrPLU23ad63W9ubPNgnqZ7WvX026erUWaqpPN6oGxg9xNzVjnnMsOnnk/G8G19yV28uccrYyLF+0sWPqOJXLqOIMCiDCqFAC/aS6+6rU6sXuwNdXprUfz+5H5yrXa9tjE+kL0UdAJjK/I6YzjdnKa7UqRyVpy1ADlQbgff5kQVbG5h6WHcdPzm5pWwb+nvkTFmVHKi8ok0u01TZrKnZhK+cWA9wZup3BPWav6TzbwiyCA65PXqZgKADhS2BuRNl5VUMx2J9PzLKVKxfORykDlBxuYRyNboaLaLLLlZSqE5WeW1+nr4OKzTqTcmsTKWOMcjLvsemCCRPpl+lJp/iJlccpXHafO/HbPp/wOgWqpNGxLVlfqyNsY7AZluPC457ukfD9XpqtPbgl6zuEZeh9pBxvXX06PS8mp8u4uGFSADb/AIlOlvKqxzsqfzBT1+JFaWu1Be8ltvTzfyj2klbyn1vweoDW+YSSxyST1OZ6pegnnuGrjVjHTBxPQqcqJL2l6TJLCdBKySykrknWSrIkkqwJBJAZGJIIRuJuJGJuOso3B2m00E2zCNozMRIM5mpO8zmamBgnaRsZuekiaBBa3p6zyvEK1sttS0Aq/wDSel1Bwpnk+La38Np7CqiyyvcL7j2msO0r5V4zW7huqFC2MpzlSDsR7zxbEkknvPSeKfEFnF7ErtoQeXuj/wAwHcTzM9mPTx5XdIiJpgiIgIiICIiAiIgIiJAiIgIiICIiAnZ4YqeUGHKWz3nGm9djVsCrEfaS8rLp9F4bWTXznAnWXddzuJ5LgevY/wANnLKQCM9Z6QPuCD1nHKcvThZY6VFRtGwzLNOQCvQ95Fw9itLDPU5BmfOItJO8xXWJCpV/qxJKLjXejKd1kF1qnBGSZqmebm/aZdPjtPe146AMZBy8jnnOQ23TpNdHzNeuOg6y5qqyqZI9J6GNEulGxlqyT0HUe0nqYOitjI7SvdygNzYxjpGnvDgKrbDeRpdv1DseVDhveVabhTrqrG525WDMRucTW+z1coJ5jLmorWvTgVsCAuAD1MEa6njOkvfm0rF1f61cHYe33kVAFupckEcydD7TnaCp9VrLeapUStuwnU5M2NjupElq9Kor1CXlKy2FOxB2xOrw3UsCeYk2Ic4HXEp8PdU1tmnsLKMekkbdJIlR03F2a5wodMqff3ERLd8O3+JGpXK0nmxkF+04pszrLQrEqWG/29pcu1Xl6ZKWZSzAkYOMjtOGXsa5HQemhSqnpzk9TLl2YdOraublYHYbZm9q1tVV5h5KRYA7DsD3lei5dTScD1HtJTVdbQ/NUwV9gD322mVd621fIKmwNWyE+oZAx0nmXrst4kpcVnlr/kI3zJ34reEXTqAx5OSwFcYx2+800tIr1bsy4a31cnXl2i0nC+dMLNNyVnBHt7zNFuFHMMHoR8yzp8qhVQM5zMvTU1oJJVm64HWVdt61SzOB6iNh2zJVWuvTl0tcsvNzgDYDt1mdBbo6LhXqXXLHYOMAyvxPV06bianRgMrLyMSeYKPgd
I1wku7pm46a2ylKxlqQQx7mX9PyBABjrOTp2cnn9RdjuSN50ATt7+0jWUY1StW3MBlG6/EzVZybjpLFpBrwRsdjKKYSwp26jPtJYzLuLvm5bc4MwXBJLPnaV7jy4z0PRpLp6mepWZRnPcy7TX1vYCtP8M/Se8gOoIYI+Nz095btC5C5I+3eU7qFtPlWY58ZVsdD237RVjp6PRPr9OFawL5TFjtudtpa4U/kBK7FWxLzzZ6kHpvObouJcPtpZ79X+G1FTFAy/wA+JBwni1Oq4m2gdf4VTc1N1ZOST1++8s40llu49dZahvNdlTmwH0gbgj3nifF+n8/R8RtasJ5TLyc/x1x7ZzPQX8Q1p4brLBYum8sMK7r09Rx3x8z59xXies47qK31oWtUX01pkA/J9zN5Xhjx4WVzEfza0dVwF7d/zlggc2Sdj7yMqaLMgZU9pkOHGM5Ub5nOO9XdCgXVLg9uk7S7CcLhzZ1WO4E7inAj6xVhJYTOJXrllPaHNOnWSjtIlkqmBIOkkEjE3BhG4m4kYm4lG0zNRNhCNhExmJBmak7zJO01PWBhpC5m7HaRPAq6kjyzPGeJr69Cvnsp5SQCcT1urf0TzPiXSjiPCL9MpAZl2buD2M3h2zn1w+D8a8k8UvND81bNzL8Z7TnSXUVWUX2VWjDoxVh8iRT2x4r2REQhERAREQEREBERAREQEREgREQEREoREDrIOvw2xqXpcZ2PSe5rINanPyJ4arKKnwJ6bh+uU0qj9ANjOecd8Lp6Cm8+XyjaSKzZ65lWp1bBQ5B6ToIgCE9wJxr0YJa6+ZOYyVuTy8AbyvRqVW01tuvea6izcCoH33kbdPhV4Go5cbEHvLupt5QFPqU+3acGl9lbO8trqgagrZLA+/aS1ZOUWo57TYp2HISD7e010wxWDn85Itb2eZ6vSdsTK1+UpUrlAMASLtnfm3GT7yW7UXamgVMo5dhkdcSo1y83KvYy2g5V/wA0LvTanCIV+knpJPxGLq0wCznCyJ+UYyesp6yyz8XXbuEXGMdRIdvSjSA6kMAwyPUvWUuLEPhf/srPKrfn0nVNoQhl5ieXfbrONrlReVmzztYCm/6kzWU4THm7crQHNzWb48xlAb+XfpOr/DKMztyhe4lSutStgTb1FifknrIbLLGqeo/Vzjp3mWtuhpDXTr6lsJKs2DjuZ27dSunYit+QkqBz9ObPacJtNbQqO681wGOUdj2MrvqtbcoqtcPbX12HX3ll0mt8uhhf7QtfmDLzZJHv3lq5fLtFwB65P2lKmkVBCg2P1j3PvOnkWLjOQRjEyq5S9VIJtJIOSoHv2mRVda/mVKBVgHm7kzlNqbKKQUVSx9HqGcYnX4frrbNMAThejenYnHaak2xdxQ4lTamoopZDylefJHU9Bv8ArJdHWqrYeUFumSJd4wjv5V2nL+hPUGOx/LtKels57GrOPQMH79zJeK6S7iwRYDkN+QmarBzAdPvN+X0g/vIbE25l2I2hna9zkn/tMqaoNXajjp0klFv8rGb6heenIGeUwTtqliWDynxyvsPvLGl0tqAorlxnCkmcqu403KzplQc4nWr1As0vnaVy6hsHHVd5Fq+aENqUsc2Yyf8AeQPobV/FVuRyOpVMjHaduvUaRHVblau11BRis11WkszU41CsiklyR1nT14cMc7t4vUcKQaUWLWtNtYw9a9G/5lSvhbtcMN5Yxln9p6PWPVqGeuv0KrgtY22JG4WxVRf7sHO/Un3nLT1TK6c/X122aatL7LLET6OY55TPP6gB+hwy7bdp6vVkCkknBAzPM66oFjZUeVupGdjKkrn2EgDmIzK+6kkdD2k7OLAVIx7iVMlSV/OUro8LYHUt/lndQ9p5/hR/j2H4E76HaZ+s1aqlpOsp1ntLVe8rmsr1kqiRLJVgbiSCRibiEbibAzQTfMDYTM1HSZBhG0ZmI6wMkzRpnOJqxgaMdpC7YkrHEr2Ebwrm69/SO284HFNbpdPSV
1VgRW6OTtOtxN2wgHvPMcb0+k1+jfTanGGG3NN4f7ZyfIfE702cateobkDm9iff5nGlzidLafW2UtYXCelSeuJTnsx6eG9kREqEREBERAREQEREUIiICIiShERAREQElopa6wKsilvQsBYVJxzDYxVjq1aQtgFgO06WlpqpOGt27TmIFTdjmTi+hh9O8511mo9RojXyAKcy/wDiHUEddtpwuEtkZC4na5c4LGcsnfG8GmDGxrW79pfRecnoDia6bSixObOAOhmxY0W9jjaZdNobVNdikdDLFI5mz27ypeGtdcdD0lmkMiHboZF26tFeaSw/ISC8MFDfyu3Ln5kmm1Fbadat1fJJOYuYkFeT6ehlvSTmqK0izlboynf5EuA7j7Sqtypby4OCQssB875Ey1WoryWPtMM9WpoUHIdcvn3Gd5PlfwxfcEH8sSxptOiUoxUAHbIGQR1g2gfjllGpQORZTYuOm4Mp65/xdgO5IAyfYewlbWmoc1nqc+YArdACdpaRQK+X4GY3trWuW6cppZDsCNvymi03WWV1KgODnPfaavy4wQQDLaMBo1sRioKkbHckf6Qlq9xNlOle+uwisgj2JPtORpmVnaysEKwG3ucbmTXuLbFZAyryfSzZBH2kaIOUWV9hgiSrjdR0kbJVQcD4lmi8c5rfAIPUmUKbDZXmvHN8zZyltuntKlrFb+JVnHMo6iRW2r1SLa9vMTSigkY6Z2nS02uNdK1MR5BPNgDfJE5d9upbh1iaPSqwVsOrYPlr7H5mdNattKBc8gHX2ll0Wbjq6nXebTyU8ygnJZhgYkWmPKQQfV3+ZpWEsrBKjImcGtgy9AdxFuyTU1HZQ81YI3Bm3JgdpSS1lVcDb2kpZnU8uSPiGUTuUsIUZI3+JYovNlIdF9JB3Px1kVZKk2BOY45RvL2iroXW6hCpaurHMD0JI3k+tXWla6ut6ucVls9wcSxpdDZToHTh1y6a+wnK3+oHPb7+0samxrKV/Dqq1AAbL2kOu09dmk59SgsKjHTrnb9ZfXSTPc0nPHuH01aPT8RRrLA5AsrYNy47nEulqlv/ABraxfwLgeTUB19yfczyN3DEp4kuloDeUauY838v5zp6ala6wqKQF7nqfmPY9J2kruFtli8p5C5K83VvkyTlKjCnb/DIXIDDAx3E2rsGcMZI1Wt+WrIcZU/tPN6/nofB3Q9G956TUenBnK1NaspV/UhODKm3nWw5GD06GR2J5gxtzDp8yXWad9JZ71sfS0rreBgnp7+0NbXuDoQ7lj7Cd1DOPw0hrXZdxtvOunWZ+s1bq9zLadMynWZcr6SsJ0kqyJZIsIkEkEjE3HWEbibTQTYQNxMiag7TOYRtGZjM1zCskzQmZJ+ZGxgasdpXs95KzbSta2xgef4vrq9NeA7Y26YngvEvHUTTvbSvPj6h7fM9N4opbU2OEbDAYE+W8ddqKXpf0udiJ38cjj5cq85qdQ+qve2w5ZpDBiel5CIiAiIgIiICIiAiIgIiJNhERAREQEREBN6/rX7xEXpZ26K9BLFPSInN0en4T9I+06/b8oic7274/wBXS0v/ALP85Wv+sREzW8We6f5paHQxEjSP/wC0fedM/wA3+WIirg5bf+5H+eSr2iJlast/7B/8pnS0n/safyiJYjz+o/urv/5Fkw7/AGiJmOlR6j+6/KXNL/8ADaf7v/WIl+MqOk+in/IJ0NL/APZ/mMRMqzw/67P80lb/AOSo/wAzf0iIaibQf/FcS/8A52/oZS0v/sk/yJES1cXW0/8Adj7f6Tez6G+wiJETV/3KS1T0b7RErLI+qr/P/pOnV/7nX/b/AP5iIna3pd1P/wAcv+Vf9JpZ/wC2/MRE1l05Y9qPEP71v8i//wC5mqdvsIic463pHqP7of5pC3b7xE0rfU/3QnPH1N9v94iVmuTxP/2Df5lnEb/2/wCYiJFjp8H6PO4kRM/SrNXaXq+kRKwnSSjtEQjcTcdYiEbDrNhEQMjpN
oiEO0wekRCtD0kb9YiBE30ytd9JiIHkuK/+4f7z5T43/wDkl+0RPR4nHzdPJGIieh5CIiAiIgIiICIiT6EREUIiICIiAiIgf//Z"
| 62,100
| 62,100
| 0.974396
| 1,563
| 62,100
| 38.713372
| 0.980166
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151852
| 0
| 62,100
| 1
| 62,100
| 62,100
| 0.822528
| 0
| 0
| 0
| 0
| 1
| 0.999775
| 0.999775
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3d1efc572df240a9fb002841ad81a931f3a314c4
| 126
|
py
|
Python
|
python/testData/formatter/fromImportWrappingChopDownIfLong.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/formatter/fromImportWrappingChopDownIfLong.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/formatter/fromImportWrappingChopDownIfLong.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
from module import foo, bar
from module import foo, bar, baz
from module import (foo, bar)
from module import (foo, bar, baz)
| 25.2
| 34
| 0.746032
| 22
| 126
| 4.272727
| 0.272727
| 0.425532
| 0.680851
| 0.808511
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0.174603
| 126
| 4
| 35
| 31.5
| 0.903846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 14
|
1829f2310b132819505a34d44774b0ec98f902d2
| 9,032
|
py
|
Python
|
admin_interface/migrations/0006_bytes_to_str.py
|
Mustafa-Abu-Ghazy/django-admin-interface
|
a04878a1b3220e9e33e15f06cc2b7d075e61542e
|
[
"MIT"
] | null | null | null |
admin_interface/migrations/0006_bytes_to_str.py
|
Mustafa-Abu-Ghazy/django-admin-interface
|
a04878a1b3220e9e33e15f06cc2b7d075e61542e
|
[
"MIT"
] | null | null | null |
admin_interface/migrations/0006_bytes_to_str.py
|
Mustafa-Abu-Ghazy/django-admin-interface
|
a04878a1b3220e9e33e15f06cc2b7d075e61542e
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import colorfield.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("admin_interface", "0005_add_recent_actions_visible"),
]
operations = [
migrations.AlterField(
model_name="theme",
name="css_delete_button_background_color",
field=colorfield.fields.ColorField(
blank=True,
default="#BA2121",
help_text="#BA2121",
max_length=10,
verbose_name="background color",
),
),
migrations.AlterField(
model_name="theme",
name="css_delete_button_background_hover_color",
field=colorfield.fields.ColorField(
blank=True,
default="#A41515",
help_text="#A41515",
max_length=10,
verbose_name="background hover color",
),
),
migrations.AlterField(
model_name="theme",
name="css_delete_button_text_color",
field=colorfield.fields.ColorField(
blank=True,
default="#FFFFFF",
help_text="#FFFFFF",
max_length=10,
verbose_name="text color",
),
),
migrations.AlterField(
model_name="theme",
name="css_generic_link_color",
field=colorfield.fields.ColorField(
blank=True,
default="#0C3C26",
help_text="#0C3C26",
max_length=10,
verbose_name="link color",
),
),
migrations.AlterField(
model_name="theme",
name="css_generic_link_hover_color",
field=colorfield.fields.ColorField(
blank=True,
default="#156641",
help_text="#156641",
max_length=10,
verbose_name="link hover color",
),
),
migrations.AlterField(
model_name="theme",
name="css_header_background_color",
field=colorfield.fields.ColorField(
blank=True,
default="#0C4B33",
help_text="#0C4B33",
max_length=10,
verbose_name="background color",
),
),
migrations.AlterField(
model_name="theme",
name="css_header_link_color",
field=colorfield.fields.ColorField(
blank=True,
default="#FFFFFF",
help_text="#FFFFFF",
max_length=10,
verbose_name="link color",
),
),
migrations.AlterField(
model_name="theme",
name="css_header_link_hover_color",
field=colorfield.fields.ColorField(
blank=True,
default="#C9F0DD",
help_text="#C9F0DD",
max_length=10,
verbose_name="link hover color",
),
),
migrations.AlterField(
model_name="theme",
name="css_header_text_color",
field=colorfield.fields.ColorField(
blank=True,
default="#44B78B",
help_text="#44B78B",
max_length=10,
verbose_name="text color",
),
),
migrations.AlterField(
model_name="theme",
name="css_module_background_color",
field=colorfield.fields.ColorField(
blank=True,
default="#44B78B",
help_text="#44B78B",
max_length=10,
verbose_name="background color",
),
),
migrations.AlterField(
model_name="theme",
name="css_module_link_color",
field=colorfield.fields.ColorField(
blank=True,
default="#FFFFFF",
help_text="#FFFFFF",
max_length=10,
verbose_name="link color",
),
),
migrations.AlterField(
model_name="theme",
name="css_module_link_hover_color",
field=colorfield.fields.ColorField(
blank=True,
default="#C9F0DD",
help_text="#C9F0DD",
max_length=10,
verbose_name="link hover color",
),
),
migrations.AlterField(
model_name="theme",
name="css_module_rounded_corners",
field=models.BooleanField(default=True, verbose_name="rounded corners"),
),
migrations.AlterField(
model_name="theme",
name="css_module_text_color",
field=colorfield.fields.ColorField(
blank=True,
default="#FFFFFF",
help_text="#FFFFFF",
max_length=10,
verbose_name="text color",
),
),
migrations.AlterField(
model_name="theme",
name="css_save_button_background_color",
field=colorfield.fields.ColorField(
blank=True,
default="#0C4B33",
help_text="#0C4B33",
max_length=10,
verbose_name="background color",
),
),
migrations.AlterField(
model_name="theme",
name="css_save_button_background_hover_color",
field=colorfield.fields.ColorField(
blank=True,
default="#0C3C26",
help_text="#0C3C26",
max_length=10,
verbose_name="background hover color",
),
),
migrations.AlterField(
model_name="theme",
name="css_save_button_text_color",
field=colorfield.fields.ColorField(
blank=True,
default="#FFFFFF",
help_text="#FFFFFF",
max_length=10,
verbose_name="text color",
),
),
migrations.AlterField(
model_name="theme",
name="list_filter_dropdown",
field=models.BooleanField(default=False, verbose_name="use dropdown"),
),
migrations.AlterField(
model_name="theme",
name="logo_visible",
field=models.BooleanField(default=True, verbose_name="visible"),
),
migrations.AlterField(
model_name="theme",
name="name",
field=models.CharField(default="Django", max_length=50),
),
migrations.AlterField(
model_name="theme",
name="related_modal_active",
field=models.BooleanField(default=True, verbose_name="active"),
),
migrations.AlterField(
model_name="theme",
name="related_modal_background_color",
field=colorfield.fields.ColorField(
blank=True,
default="#000000",
help_text="#000000",
max_length=10,
verbose_name="background color",
),
),
migrations.AlterField(
model_name="theme",
name="related_modal_background_opacity",
field=models.FloatField(
choices=[
(0.1, "10%"),
(0.2, "20%"),
(0.3, "30%"),
(0.4, "40%"),
(0.5, "50%"),
(0.6, "60%"),
(0.7, "70%"),
(0.8, "80%"),
(0.9, "90%"),
],
default=0.2,
help_text="20%",
verbose_name="background opacity",
),
),
migrations.AlterField(
model_name="theme",
name="related_modal_rounded_corners",
field=models.BooleanField(default=True, verbose_name="rounded corners"),
),
migrations.AlterField(
model_name="theme",
name="title",
field=models.CharField(
blank=True, default="Django administration", max_length=50
),
),
migrations.AlterField(
model_name="theme",
name="title_color",
field=colorfield.fields.ColorField(
blank=True,
default="#F5DD5D",
help_text="#F5DD5D",
max_length=10,
verbose_name="title color",
),
),
migrations.AlterField(
model_name="theme",
name="title_visible",
field=models.BooleanField(default=True, verbose_name="visible"),
),
]
| 32.489209
| 84
| 0.481178
| 737
| 9,032
| 5.654003
| 0.128901
| 0.12959
| 0.161987
| 0.187905
| 0.856012
| 0.850732
| 0.832493
| 0.807775
| 0.778258
| 0.688025
| 0
| 0.035236
| 0.412423
| 9,032
| 277
| 85
| 32.606498
| 0.749953
| 0.002325
| 0
| 0.750929
| 0
| 0
| 0.162615
| 0.065268
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.011152
| 0
| 0.022305
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1862fa6497570d262db90c90146008b261576f76
| 1,627
|
py
|
Python
|
modulo 4/poligonos.py
|
antonio343/clase
|
fda04a606246695aa5d93c8b2b5e2890a16d5973
|
[
"MIT"
] | null | null | null |
modulo 4/poligonos.py
|
antonio343/clase
|
fda04a606246695aa5d93c8b2b5e2890a16d5973
|
[
"MIT"
] | null | null | null |
modulo 4/poligonos.py
|
antonio343/clase
|
fda04a606246695aa5d93c8b2b5e2890a16d5973
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
def cuadrados(l,n):
import turtle
t = turtle.Turtle()
t.up() # lápiz "arriba" (no pintar)
t.goto(-150, 150) # movimiento directo a coordenadas, por sencillez (no es lo habitual)
t.down()
SIDE_LENGTH = l
angle = 90
i=0
while i<n:
t.forward(l)
t.right(angle)
t.forward(l)
t.right(angle)
t.forward(l)
t.right(angle)
t.forward(l)
t.right(angle)
t.up()
t.forward(2*l)
t.down()
i+=1
turtle.mainloop()
def poligonos(l,n):
import turtle
t = turtle.Turtle()
t.up() # lápiz "arriba" (no pintar)
t.goto(-150, 150) # movimiento directo a coordenadas, por sencillez (no es lo habitual)
t.down()
angle = 360/n
i=0
a=0
while i<n:
while a<n:
t.forward(l)
t.right(angle)
a+=1
a=0
t.up()
t.forward(2*l)
t.down()
i+=1
turtle.mainloop()
def espiral(l,n):
import turtle
t = turtle.Turtle()
t.up() # lápiz "arriba" (no pintar)
t.goto(-150, 150) # movimiento directo a coordenadas, por sencillez (no es lo habitual)
t.down()
SIDE_LENGTH = l
angle = 90
angle2=360/n
i=0
while i<n:
t.forward(l)
t.right(angle)
t.forward(l)
t.right(angle)
t.forward(l)
t.right(angle)
t.forward(l)
t.right(angle)
t.right(angle2)
i+=1
turtle.mainloop()
# cuadrados(20,5)
# poligonos(20,5)
espiral(20,15)
| 19.60241
| 92
| 0.508912
| 234
| 1,627
| 3.529915
| 0.213675
| 0.106538
| 0.098063
| 0.108959
| 0.811138
| 0.811138
| 0.811138
| 0.785714
| 0.785714
| 0.785714
| 0
| 0.05038
| 0.353411
| 1,627
| 83
| 93
| 19.60241
| 0.734791
| 0.221266
| 0
| 0.863636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045455
| false
| 0
| 0.045455
| 0
| 0.090909
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1866a5b88df7632ff884d28c3250332860e5b11f
| 96
|
py
|
Python
|
ismo/submit/user_group_id.py
|
kjetil-lye/iterative_surrogate_optimization
|
f5de412daab1180612837f4c950203ad87d62f7e
|
[
"MIT"
] | 6
|
2020-10-20T14:03:50.000Z
|
2021-11-02T15:08:55.000Z
|
ismo/submit/user_group_id.py
|
kjetil-lye/iterative_surrogate_optimization
|
f5de412daab1180612837f4c950203ad87d62f7e
|
[
"MIT"
] | 3
|
2020-11-13T19:04:10.000Z
|
2022-02-10T02:12:18.000Z
|
ismo/submit/user_group_id.py
|
kjetil-lye/iterative_surrogate_optimization
|
f5de412daab1180612837f4c950203ad87d62f7e
|
[
"MIT"
] | 3
|
2020-10-20T14:03:53.000Z
|
2021-03-19T23:11:34.000Z
|
import os
def get_user_id():
return os.getuid()
def get_group_id():
return os.getgid()
| 13.714286
| 22
| 0.677083
| 16
| 96
| 3.8125
| 0.625
| 0.196721
| 0.327869
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.197917
| 96
| 7
| 23
| 13.714286
| 0.792208
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| true
| 0
| 0.2
| 0.4
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
629be1fb71dd70ba5f66c017dc252cdb370c1f41
| 5,047
|
py
|
Python
|
tests/task/Task/test_Task.py
|
eragasa/pypospack
|
21cdecaf3b05c87acc532d992be2c04d85bfbc22
|
[
"MIT"
] | 4
|
2018-01-18T19:59:56.000Z
|
2020-08-25T11:56:52.000Z
|
tests/task/Task/test_Task.py
|
eragasa/pypospack
|
21cdecaf3b05c87acc532d992be2c04d85bfbc22
|
[
"MIT"
] | 1
|
2018-04-22T23:02:13.000Z
|
2018-04-22T23:02:13.000Z
|
tests/task/Task/test_Task.py
|
eragasa/pypospack
|
21cdecaf3b05c87acc532d992be2c04d85bfbc22
|
[
"MIT"
] | 1
|
2019-09-14T07:04:42.000Z
|
2019-09-14T07:04:42.000Z
|
import pytest
import os,shutil
def test____import_from_pypospack_task_base():
from pypospack.task.base import Task
def test____import_from_pypospack_task_Task():
from pypospack.task import Task
def test____init____norestart__with_cleanup():
#<--- create simulation variables
task_name = "task_name"
task_directory = "task_directory"
restart = False
root_directory = os.getcwd()
#<--- setup conditions for the test
if os.path.isdir(task_directory):
shutil.rmtree(task_directory)
#<--- code we are testing
from pypospack.task import Task
testtask = Task(
task_name=task_name,
task_directory=task_directory,
restart=False)
#<--- did the code leave us where we started?
assert os.getcwd() == root_directory
#<--- checking the attributes
assert type(testtask.is_restart) is bool
assert testtask.is_restart == restart
assert type(testtask.root_directory) is str
assert testtask.root_directory == os.getcwd()
assert type(testtask.task_directory) is str
assert os.path.abspath(testtask.task_directory) \
== os.path.abspath(task_directory)
assert testtask.task_name == task_name
#<--- check directory structure
assert os.path.isdir(testtask.task_directory)
#<--- cleanup
shutil.rmtree(task_directory)
def test____init____norestart__no_cleanup():
#<--- create simulation variables
task_name = "task_name"
task_directory = "task_directory"
restart = False
root_directory = os.getcwd()
#<--- setup conditions for the test
if os.path.isdir(task_directory):
shutil.rmtree(task_directory)
#<------- this creates an existing simulation directory
from pypospack.task import Task
testtask = Task(
task_name=task_name,
task_directory=task_directory,
restart=False)
assert os.path.isdir(task_directory)
#<--- code we are testing
try:
testtask = Task(
task_name=task_name,
task_directory=task_directory,
restart=False)
except:
pytest.fail()
#<--- did the code leave us where we started?
assert os.getcwd() == root_directory
#<--- checking the attributes
assert type(testtask.is_restart) is bool
assert testtask.is_restart == restart
assert type(testtask.root_directory) is str
assert testtask.root_directory == os.getcwd()
assert type(testtask.task_directory) is str
assert os.path.abspath(testtask.task_directory) \
== os.path.abspath(task_directory)
assert testtask.task_name == task_name
#<-- check directory structure
assert os.path.isdir(testtask.task_directory)
#<--- cleanup
shutil.rmtree(task_directory)
def test____init____restart__with_cleanup():
#<--- create simulation variables
task_name = "task_name"
task_directory = "task_directory"
restart = True
root_directory = os.getcwd()
#<--- setup conditions for the test
if os.path.isdir(task_directory):
shutil.rmtree(task_directory)
#<--- code we are testing
from pypospack.task import Task
testtask = Task(
task_name=task_name,
task_directory=task_directory,
restart=restart)
#<--- did the code leave us where we started?
assert os.getcwd() == root_directory
#<--- checking the attributes
assert testtask.is_restart == restart
assert testtask.root_directory == os.getcwd()
assert os.path.abspath(testtask.task_directory) \
== os.path.abspath(task_directory)
assert testtask.task_name == task_name
#<-- check directory structure
assert os.path.isdir(testtask.task_directory)
#<--- cleanup
shutil.rmtree(task_directory)
def test__init____restart__no_cleanup():
#<--- create simulation variables
task_name = "task_name"
task_directory = "task_directory"
restart = True
root_directory = os.getcwd()
#<--- setup conditions for the test
if os.path.isdir(task_directory):
shutil.rmtree(task_directory)
#<------- this creates an existing simulation directory
from pypospack.task import Task
testtask = Task(
task_name=task_name,
task_directory=task_directory,
restart=restart)
assert os.path.isdir(task_directory)
#<--- code we are testing
testtask = Task(
task_name=task_name,
task_directory=task_directory,
restart=restart)
#<--- did the code leave us where we started?
assert os.getcwd() == root_directory
#<--- checking the attributes
assert testtask.is_restart == restart
assert testtask.root_directory == os.getcwd()
assert os.path.abspath(testtask.task_directory) \
== os.path.abspath(task_directory)
assert testtask.task_name == task_name
#<-- check directory structure
assert os.path.isdir(testtask.task_directory)
#<--- cleanup
shutil.rmtree(task_directory)
| 31.742138
| 59
| 0.673469
| 600
| 5,047
| 5.408333
| 0.1
| 0.192296
| 0.088752
| 0.069029
| 0.965177
| 0.956857
| 0.938367
| 0.938367
| 0.938367
| 0.938367
| 0
| 0
| 0.22984
| 5,047
| 158
| 60
| 31.943038
| 0.834834
| 0.182485
| 0
| 0.885714
| 0
| 0
| 0.022466
| 0
| 0
| 0
| 0
| 0
| 0.304762
| 1
| 0.057143
| false
| 0
| 0.095238
| 0
| 0.152381
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
62f50d0b73af49982539f930bc4b61b2edd9ff9a
| 36,704
|
py
|
Python
|
elorating/rpc/ratingserver/rating_server.py
|
Seraphli/elo_rating
|
49ccd6030aec63ae638af1ed7b5a8e870777b006
|
[
"MIT"
] | 1
|
2019-04-20T17:32:16.000Z
|
2019-04-20T17:32:16.000Z
|
elorating/rpc/ratingserver/rating_server.py
|
Seraphli/elo_rating
|
49ccd6030aec63ae638af1ed7b5a8e870777b006
|
[
"MIT"
] | null | null | null |
elorating/rpc/ratingserver/rating_server.py
|
Seraphli/elo_rating
|
49ccd6030aec63ae638af1ed7b5a8e870777b006
|
[
"MIT"
] | null | null | null |
#
# Autogenerated by Thrift Compiler (0.12.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec
import sys
import logging
from .ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
all_structs = []
class Iface(object):
def ping(self):
pass
def version(self):
pass
def set_result(self, player_1, player_2, result, game_id):
"""
Parameters:
- player_1
- player_2
- result
- game_id
"""
pass
def set_rating(self, player, rating):
"""
Parameters:
- player
- rating
"""
pass
def get_rating(self, player):
"""
Parameters:
- player
"""
pass
def leadboard(self, number):
"""
Parameters:
- number
"""
pass
class Client(Iface):
def __init__(self, iprot, oprot=None):
self._iprot = self._oprot = iprot
if oprot is not None:
self._oprot = oprot
self._seqid = 0
def ping(self):
self.send_ping()
self.recv_ping()
def send_ping(self):
self._oprot.writeMessageBegin('ping', TMessageType.CALL, self._seqid)
args = ping_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_ping(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = ping_result()
result.read(iprot)
iprot.readMessageEnd()
return
def version(self):
self.send_version()
return self.recv_version()
def send_version(self):
self._oprot.writeMessageBegin('version', TMessageType.CALL, self._seqid)
args = version_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_version(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = version_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "version failed: unknown result")
def set_result(self, player_1, player_2, result, game_id):
"""
Parameters:
- player_1
- player_2
- result
- game_id
"""
self.send_set_result(player_1, player_2, result, game_id)
self.recv_set_result()
def send_set_result(self, player_1, player_2, result, game_id):
self._oprot.writeMessageBegin('set_result', TMessageType.CALL, self._seqid)
args = set_result_args()
args.player_1 = player_1
args.player_2 = player_2
args.result = result
args.game_id = game_id
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_set_result(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = set_result_result()
result.read(iprot)
iprot.readMessageEnd()
return
def set_rating(self, player, rating):
"""
Parameters:
- player
- rating
"""
self.send_set_rating(player, rating)
self.recv_set_rating()
def send_set_rating(self, player, rating):
self._oprot.writeMessageBegin('set_rating', TMessageType.CALL, self._seqid)
args = set_rating_args()
args.player = player
args.rating = rating
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_set_rating(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = set_rating_result()
result.read(iprot)
iprot.readMessageEnd()
return
def get_rating(self, player):
"""
Parameters:
- player
"""
self.send_get_rating(player)
return self.recv_get_rating()
def send_get_rating(self, player):
self._oprot.writeMessageBegin('get_rating', TMessageType.CALL, self._seqid)
args = get_rating_args()
args.player = player
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get_rating(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = get_rating_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "get_rating failed: unknown result")
def leadboard(self, number):
"""
Parameters:
- number
"""
self.send_leadboard(number)
return self.recv_leadboard()
def send_leadboard(self, number):
self._oprot.writeMessageBegin('leadboard', TMessageType.CALL, self._seqid)
args = leadboard_args()
args.number = number
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_leadboard(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = leadboard_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "leadboard failed: unknown result")
class Processor(Iface, TProcessor):
def __init__(self, handler):
self._handler = handler
self._processMap = {}
self._processMap["ping"] = Processor.process_ping
self._processMap["version"] = Processor.process_version
self._processMap["set_result"] = Processor.process_set_result
self._processMap["set_rating"] = Processor.process_set_rating
self._processMap["get_rating"] = Processor.process_get_rating
self._processMap["leadboard"] = Processor.process_leadboard
def process(self, iprot, oprot):
(name, type, seqid) = iprot.readMessageBegin()
if name not in self._processMap:
iprot.skip(TType.STRUCT)
iprot.readMessageEnd()
x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
x.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
return
else:
self._processMap[name](self, seqid, iprot, oprot)
return True
def process_ping(self, seqid, iprot, oprot):
args = ping_args()
args.read(iprot)
iprot.readMessageEnd()
result = ping_result()
try:
self._handler.ping()
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("ping", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_version(self, seqid, iprot, oprot):
args = version_args()
args.read(iprot)
iprot.readMessageEnd()
result = version_result()
try:
result.success = self._handler.version()
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("version", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_set_result(self, seqid, iprot, oprot):
args = set_result_args()
args.read(iprot)
iprot.readMessageEnd()
result = set_result_result()
try:
self._handler.set_result(args.player_1, args.player_2, args.result, args.game_id)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("set_result", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_set_rating(self, seqid, iprot, oprot):
args = set_rating_args()
args.read(iprot)
iprot.readMessageEnd()
result = set_rating_result()
try:
self._handler.set_rating(args.player, args.rating)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("set_rating", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_get_rating(self, seqid, iprot, oprot):
args = get_rating_args()
args.read(iprot)
iprot.readMessageEnd()
result = get_rating_result()
try:
result.success = self._handler.get_rating(args.player)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("get_rating", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_leadboard(self, seqid, iprot, oprot):
args = leadboard_args()
args.read(iprot)
iprot.readMessageEnd()
result = leadboard_result()
try:
result.success = self._handler.leadboard(args.number)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("leadboard", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class ping_args(object):
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('ping_args')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(ping_args)
ping_args.thrift_spec = (
)
class ping_result(object):
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('ping_result')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(ping_result)
ping_result.thrift_spec = (
)
class version_args(object):
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('version_args')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(version_args)
version_args.thrift_spec = (
)
class version_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('version_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(version_result)
version_result.thrift_spec = (
(0, TType.STRING, 'success', 'UTF8', None, ), # 0
)
class set_result_args(object):
    """
    Generated Thrift arguments for the set_result() service call.

    Attributes:
     - player_1
     - player_2
     - result: game outcome code (wire type: byte)
     - game_id
    """
    def __init__(self, player_1=None, player_2=None, result=None, game_id=None,):
        self.player_1 = player_1
        self.player_2 = player_2
        self.result = result
        self.game_id = game_id
    def read(self, iprot):
        # Fast path: C-accelerated decoding when the protocol supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Python 2 returns bytes; decode to unicode there only.
                    self.player_1 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.player_2 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.BYTE:
                    self.result = iprot.readByte()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRING:
                    self.game_id = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encoding when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('set_result_args')
        if self.player_1 is not None:
            oprot.writeFieldBegin('player_1', TType.STRING, 1)
            oprot.writeString(self.player_1.encode('utf-8') if sys.version_info[0] == 2 else self.player_1)
            oprot.writeFieldEnd()
        if self.player_2 is not None:
            oprot.writeFieldBegin('player_2', TType.STRING, 2)
            oprot.writeString(self.player_2.encode('utf-8') if sys.version_info[0] == 2 else self.player_2)
            oprot.writeFieldEnd()
        if self.result is not None:
            oprot.writeFieldBegin('result', TType.BYTE, 3)
            oprot.writeByte(self.result)
            oprot.writeFieldEnd()
        if self.game_id is not None:
            oprot.writeFieldBegin('game_id', TType.STRING, 4)
            oprot.writeString(self.game_id.encode('utf-8') if sys.version_info[0] == 2 else self.game_id)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
all_structs.append(set_result_args)
set_result_args.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'player_1', 'UTF8', None, ),  # 1
    (2, TType.STRING, 'player_2', 'UTF8', None, ),  # 2
    (3, TType.BYTE, 'result', None, None, ),  # 3
    (4, TType.STRING, 'game_id', 'UTF8', None, ),  # 4
)
class set_result_result(object):
    """Generated Thrift result wrapper for the void set_result() call (no fields)."""
    def read(self, iprot):
        # Fast path: C-accelerated decoding when the protocol supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                # No declared fields: skip anything unexpected on the wire.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encoding when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('set_result_result')
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
all_structs.append(set_result_result)
set_result_result.thrift_spec = (
)
class set_rating_args(object):
    """
    Generated Thrift arguments for the set_rating() service call.

    Attributes:
     - player
     - rating: new rating value (wire type: i16)
    """
    def __init__(self, player=None, rating=None,):
        self.player = player
        self.rating = rating
    def read(self, iprot):
        # Fast path: C-accelerated decoding when the protocol supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Python 2 returns bytes; decode to unicode there only.
                    self.player = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I16:
                    self.rating = iprot.readI16()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encoding when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('set_rating_args')
        if self.player is not None:
            oprot.writeFieldBegin('player', TType.STRING, 1)
            oprot.writeString(self.player.encode('utf-8') if sys.version_info[0] == 2 else self.player)
            oprot.writeFieldEnd()
        if self.rating is not None:
            oprot.writeFieldBegin('rating', TType.I16, 2)
            oprot.writeI16(self.rating)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
all_structs.append(set_rating_args)
set_rating_args.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'player', 'UTF8', None, ),  # 1
    (2, TType.I16, 'rating', None, None, ),  # 2
)
class set_rating_result(object):
    """Generated Thrift result wrapper for the void set_rating() call (no fields)."""
    def read(self, iprot):
        # Fast path: C-accelerated decoding when the protocol supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                # No declared fields: skip anything unexpected on the wire.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encoding when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('set_rating_result')
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
all_structs.append(set_rating_result)
set_rating_result.thrift_spec = (
)
class get_rating_args(object):
    """
    Generated Thrift arguments for the get_rating() service call.

    Attributes:
     - player
    """
    def __init__(self, player=None,):
        self.player = player
    def read(self, iprot):
        # Fast path: C-accelerated decoding when the protocol supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Python 2 returns bytes; decode to unicode there only.
                    self.player = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encoding when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('get_rating_args')
        if self.player is not None:
            oprot.writeFieldBegin('player', TType.STRING, 1)
            oprot.writeString(self.player.encode('utf-8') if sys.version_info[0] == 2 else self.player)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
all_structs.append(get_rating_args)
get_rating_args.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'player', 'UTF8', None, ),  # 1
)
class get_rating_result(object):
    """
    Generated Thrift result wrapper for the get_rating() service call.

    Attributes:
     - success: the player's rating (wire type: i16)
    """
    def __init__(self, success=None,):
        self.success = success
    def read(self, iprot):
        # Fast path: C-accelerated decoding when the protocol supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.I16:
                    self.success = iprot.readI16()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encoding when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('get_rating_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.I16, 0)
            oprot.writeI16(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
all_structs.append(get_rating_result)
get_rating_result.thrift_spec = (
    (0, TType.I16, 'success', None, None, ),  # 0
)
class leadboard_args(object):
    """
    Generated Thrift arguments for the leadboard() service call.

    Attributes:
     - number: number of entries requested (wire type: byte)
    """
    def __init__(self, number=None,):
        self.number = number
    def read(self, iprot):
        # Fast path: C-accelerated decoding when the protocol supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.BYTE:
                    self.number = iprot.readByte()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encoding when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('leadboard_args')
        if self.number is not None:
            oprot.writeFieldBegin('number', TType.BYTE, 1)
            oprot.writeByte(self.number)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
all_structs.append(leadboard_args)
leadboard_args.thrift_spec = (
    None,  # 0
    (1, TType.BYTE, 'number', None, None, ),  # 1
)
class leadboard_result(object):
    """
    Generated Thrift result wrapper for the leadboard() service call.

    Attributes:
     - success: leaderboard payload as a string
    """
    def __init__(self, success=None,):
        self.success = success
    def read(self, iprot):
        # Fast path: C-accelerated decoding when the protocol supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRING:
                    # Python 2 returns bytes; decode to unicode there only.
                    self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encoding when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('leadboard_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRING, 0)
            oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
all_structs.append(leadboard_result)
leadboard_result.thrift_spec = (
    (0, TType.STRING, 'success', 'UTF8', None, ),  # 0
)
# Resolve forward references in all registered thrift_specs, then drop the
# registration list — it is only needed at import time.
fix_spec(all_structs)
del all_structs
| 32.859445
| 134
| 0.5983
| 4,012
| 36,704
| 5.204636
| 0.042622
| 0.015085
| 0.027154
| 0.013792
| 0.834778
| 0.805182
| 0.778363
| 0.761841
| 0.746229
| 0.739668
| 0
| 0.006205
| 0.297515
| 36,704
| 1,116
| 135
| 32.888889
| 0.803638
| 0.0179
| 0
| 0.769053
| 1
| 0
| 0.03674
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.12933
| false
| 0.006928
| 0.009238
| 0.04157
| 0.251732
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c53f6f5abfb7917ac5cd3309d32eebbf21a9579f
| 102
|
py
|
Python
|
fastdtw/__init__.py
|
lovepocky/fastdtw
|
f60e24844f59c5d039748a9d3562582e6be41e29
|
[
"MIT"
] | 612
|
2015-04-10T13:59:28.000Z
|
2022-03-21T11:43:46.000Z
|
fastdtw/__init__.py
|
lovepocky/fastdtw
|
f60e24844f59c5d039748a9d3562582e6be41e29
|
[
"MIT"
] | 45
|
2015-11-19T17:28:37.000Z
|
2021-10-06T15:40:28.000Z
|
fastdtw/__init__.py
|
lovepocky/fastdtw
|
f60e24844f59c5d039748a9d3562582e6be41e29
|
[
"MIT"
] | 123
|
2015-12-08T23:36:59.000Z
|
2022-03-28T13:26:30.000Z
|
# Prefer the compiled extension implementation; fall back to the
# pure-Python module of the same package when the extension is not built.
try:
    from ._fastdtw import fastdtw, dtw
except ImportError:
    from .fastdtw import fastdtw, dtw
| 20.4
| 38
| 0.735294
| 13
| 102
| 5.692308
| 0.538462
| 0.297297
| 0.459459
| 0.648649
| 0.72973
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.205882
| 102
| 4
| 39
| 25.5
| 0.91358
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
c55bee16e4a3813b4c1956b4ee3680324f4e33f2
| 44
|
py
|
Python
|
interest_rate.py
|
chapman-cpsc-230/hw1-johns660
|
e84299e7ca8709dae3958bfd8231eee27a01d35f
|
[
"MIT"
] | null | null | null |
interest_rate.py
|
chapman-cpsc-230/hw1-johns660
|
e84299e7ca8709dae3958bfd8231eee27a01d35f
|
[
"MIT"
] | 1
|
2016-02-25T05:26:05.000Z
|
2016-02-25T05:26:05.000Z
|
interest_rate.py
|
chapman-cpsc-230/hw1-johns660
|
e84299e7ca8709dae3958bfd8231eee27a01d35f
|
[
"MIT"
] | null | null | null |
# Compound interest: amount after n periods at p percent per period.
A = 1000  # principal
p = 5.0   # interest rate, percent per period
n = 3     # number of periods
# Fix: the Python 2 `print expr` statement is a SyntaxError on Python 3;
# use the print() function instead.
print(A * (1 + p / 100) ** n)
| 8.8
| 20
| 0.5
| 13
| 44
| 1.692308
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 0.25
| 44
| 4
| 21
| 11
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.25
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c565d32c14ad0208b86cb397257fd5a4c155fffe
| 16,237
|
py
|
Python
|
src/tuning/repulsive_model.py
|
zhaoliuUT/tuning
|
1f1a9b1d66b994196de076f08f74a48c60dd7141
|
[
"MIT"
] | null | null | null |
src/tuning/repulsive_model.py
|
zhaoliuUT/tuning
|
1f1a9b1d66b994196de076f08f74a48c60dd7141
|
[
"MIT"
] | null | null | null |
src/tuning/repulsive_model.py
|
zhaoliuUT/tuning
|
1f1a9b1d66b994196de076f08f74a48c60dd7141
|
[
"MIT"
] | null | null | null |
import numpy as np
import time
import os
from scipy import special
def tuning_update_inhomo(tuning, weight, sigma_vec, alpha = 2, eta = 1, beta = 0, Lambda = 1,
                         elastic_term = 'sum',# other options: 'exp', 'expweight', 'rand'
                         elastic_term_periodic = True, # use closed curve
                         upper_bound = 1, lower_bound = 0):
    '''Gaussian Model, no periodic boundary condition

    One explicit update step of the tuning curve: adds eta times the
    repulsive term and beta times the elastic term, then clips the result
    to [lower_bound, upper_bound].  The input array is not modified; the
    updated array is returned.
    '''
    # tuning.shape = (numNeuro, numBin)
    # weight: vector, sum =1, length = numBin
    # sigma_vec: vector, gaussian std, length = numBin
    # Lambda: number, std for the laplacian term
    # eta: number, coefficient of the repulsive term
    # alpha: number, exponent of \|x_k - x_l\| in the probability distribution
    # beta: number, coefficient of the laplacian term
    nNeuro, nBin = tuning.shape
    dS = np.zeros((nBin, nBin))
    for i in range(nNeuro):
        dX = tuning[i,:][None, :] - tuning[i,:][:, None] # dX[k, l]=tuning[i, l]-tuning[i, k]
        dS += dX**2
    dS = np.sqrt(dS)
    # dS[k,l]: euclidean distance between the k-th point and the l-th point
    # negative derivatives
    # NOTE(review): for alpha < 2 the zero diagonal of dS makes dS**(alpha-2)
    # divide by zero below — presumably alpha >= 2 is assumed; confirm.
    u = np.zeros((nNeuro, nBin))
    F = (dS**(alpha-2))*np.exp(-dS**alpha/(2*sigma_vec[:,None]**alpha))/sigma_vec[:,None]**(alpha+1)
    F2 = (dS**(alpha-2))*np.exp(-dS**alpha/(2*sigma_vec[None,:]**alpha))/sigma_vec[None, :]**(alpha+1)
    for i in range(nNeuro):
        xi = tuning[i,:]
        dX = xi[None, :] - xi[:, None]
        u[i,:] = np.sum(weight[:,None]*dX*F,axis = 0)
        # inside the sum:
        # [k,l] = weight[k]*dX[k,l]*(dS[k,l]**(alpha-2))
        #         *np.exp(-dS[k,l]**alpha/(2*sigma_vec[k]**alpha))/sigma_vec[k]**(alpha+1)
        #       = weight[k]*(x[l]_i-x[k]_i)*(dS[k,l]**(alpha-2))
        #         *np.exp(-dS[k,l]**alpha/(2*sigma_vec[k]**alpha))/sigma_vec[k]**(alpha+1)
        # dS[k,l] = dS[l,k]
        # sum over k for fixed l
        u[i,:] += np.sum(weight[:,None]*dX*F2, axis = 0)
        # inside the sum:
        # [k,l] = weight[k]*dX[k,l]*(dS[k,l]**(alpha-2))
        #         *np.exp(-dS[k,l]**alpha/(2*sigma_vec[l]**alpha))/sigma_vec[l]**(alpha+1)
        #       = weight[k]*(x1[l]_i-x1[k]_i)*(dS[k,l]**(alpha-2))
        #         *np.exp(-dS[k,l]**alpha/(2*sigma_vec[l]**alpha))/sigma_vec[l]**(alpha+1)
        # sum over k for fixed l
    u = weight*alpha*0.5*u # u[i,k] = weight[k]*u[i,k]*alpha*0.5
    # positive derivatives of the elastic term
    dl = np.zeros((nNeuro, nBin))
    if beta != 0:
        diffx = np.zeros((nNeuro, nBin))
        diffx[:, 0:-1] = np.diff(tuning, axis = 1) # x[k+1]_i - x[k]_i
        if elastic_term_periodic:
            diffx[:, -1] = tuning[:, 0] - tuning[:, -1]
        revdiffx =-np.roll(diffx, 1, axis = 1) # x[k-1]_i - x[k]_i
        diffS = np.sqrt(np.sum(diffx**2, axis = 0)) # sum diffx[i,k]**2 from i=1 to nNeuro
        revdiffS = np.roll(diffS, 1)
        if elastic_term == 'sum':
            diffpow = diffS**(alpha-2)
            revdiffpow = revdiffS**(alpha-2)
            dl = diffx*diffpow + revdiffx*revdiffpow
            dl *= 0.5*alpha
        elif elastic_term == 'exp':
            expdiffpow = (diffS**(alpha-2))*np.exp(-diffS**alpha/(2*Lambda**alpha))
            exprevdiffpow = (revdiffS**(alpha-2))*np.exp(-revdiffS**alpha/(2*Lambda**alpha))
            dl = diffx*expdiffpow + revdiffx*exprevdiffpow
            dl *= 0.5*alpha/Lambda**3
        elif elastic_term == 'expweight':
            expdiffpow = (diffS**(alpha-2))*np.exp(-diffS**alpha/(2*Lambda**alpha))
            exprevdiffpow = (revdiffS**(alpha-2))*np.exp(-revdiffS**alpha/(2*Lambda**alpha))
            dl = weight*np.roll(weight,-1)*diffx*expdiffpow + \
                weight*np.roll(weight,1)*revdiffx*exprevdiffpow
            dl *= 0.5*alpha/Lambda**3
        elif elastic_term == 'rand':
            dl = np.random.randn(nNeuro, nBin)
        if elastic_term in ['sum', 'exp', 'expweight'] and (not elastic_term_periodic):
            # NOTE(review): endpoint columns are doubled for open curves —
            # presumably the boundary terms of the discrete gradient; confirm.
            dl[:,0] = 2*dl[:,0]
            dl[:, -1] = 2*dl[:, -1]
    tuningnew = tuning + eta*u + beta*dl
    tuningnew[tuningnew > upper_bound] = upper_bound
    tuningnew[tuningnew < lower_bound] = lower_bound
    return tuningnew
def conditional_probability_matrix_gaussian(tuning, sigma_vec, alpha = 2):
    """Pairwise conditional-probability matrix for the Gaussian model.

    D[k, l] = exp(-d(k, l)**alpha / (2 * sigma_vec[k]**alpha)) / sigma_vec[k],
    where d(k, l) is the Euclidean distance between the k-th and l-th
    columns (points) of `tuning`.
    """
    num_bins = tuning.shape[1]
    sq_dist = np.zeros((num_bins, num_bins))
    for row in tuning:
        sq_dist += (row[None, :] - row[:, None]) ** 2
    dist = np.sqrt(sq_dist)
    sigma_col = sigma_vec[:, None]
    return np.exp(-dist ** alpha / (2 * sigma_col ** alpha)) / sigma_col
def evaluate_elastic_term(tuning, weight, Lambda, alpha = 2, beta = 0, elastic_term = 'sum', elastic_term_periodic = True):
    """Evaluate the elastic (smoothness) term of the Gaussian model, scaled by beta.

    Returns 0 when beta == 0 or when elastic_term is not one of
    'sum' / 'exp' / 'expweight'.
    """
    if beta == 0:
        return 0
    n_neuro, n_bin = tuning.shape
    # Forward differences between consecutive curve points; the last column
    # wraps around when the curve is treated as closed.
    deltas = np.zeros((n_neuro, n_bin))
    deltas[:, :-1] = np.diff(tuning, axis=1)
    if elastic_term_periodic:
        deltas[:, -1] = tuning[:, 0] - tuning[:, -1]
    seg_len = np.sqrt((deltas ** 2).sum(axis=0))
    value = 0
    if elastic_term == 'sum':
        value = -0.5 * (seg_len ** alpha).sum()
    elif elastic_term == 'exp':
        value = np.exp(-seg_len ** alpha / (2 * Lambda ** alpha)).sum() / Lambda
    elif elastic_term == 'expweight':
        gauss = np.exp(-seg_len ** alpha / (2 * Lambda ** alpha))
        # weight[i] * weight[i+1] pairs each segment with its endpoints' weights
        value = (weight * np.roll(weight, -1) * gauss).sum() / Lambda
    return value * beta
def tuning_update_inhomo_periodic(tuning, weight, kappa_mat, alpha = 2, eta = 1, beta = 0, Lambda = 1,
                                  elastic_term = 'exp', # other options: 'expweight', 'rand'
                                  elastic_term_periodic = True, # use closed curve
                                  ):
    '''Von-Mises Model, periodic b.c. in [-1, 1]^k

    One explicit update step of the tuning curve under the Von Mises
    (periodic) observation model: adds eta times the repulsive term and
    beta times the elastic term, then wraps each coordinate back into
    [-1, 1].  Returns a new array; the input is not modified.
    '''
    # tuning.shape = (numNeuro, numBin)
    # weight: vector, sum =1, length = numBin
    # kappa_mat: matrix, kappa_mat[:,l] = [kappa_{l,1},kappa_{l,2}] # shape = (2, numBin)
    # Lambda: number, 'kappa' parameter for the elastic term
    #         (theoretically we can also modify it to be a vector of size 2, not implemented yet)
    #         when elastic_term takes 'rand', Lambda serves as 'kappa' parameter of noise.
    # alpha: number, exponent of 1-cos(x_k - x_l) term in the probability distribution
    # eta: number, coefficient of the repulsive term
    # beta: number, coefficient of the laplacian term
    nNeuro, nBin = tuning.shape
    P = np.zeros((nBin, nBin))
    for i in range(nNeuro):
        dX = tuning[i,:][None, :] - tuning[i,:][:, None] # dX[k, l]=tuning[i, l]-tuning[i, k]
        P += kappa_mat[i,:]*(1-np.cos(np.pi*dX))**(0.5*alpha)
    P = np.exp(-P)
    # inside exp:
    # [k,l] = kappa_mat[0,l]*(1-np.cos(np.pi*(x1[l] - x1[k])))**(alpha/2)
    #       + kappa_mat[1,l]*(1-np.cos(np.pi*(x2[l] - x2[k])))**(alpha/2)
    constants = np.exp(kappa_mat)/ special.iv(0, kappa_mat)/2
    P *= np.prod(constants, axis = 0)
    # at[k,l] position multiply by constants[0, l]*constants[1,l]
    # negative derivatives
    u = np.zeros((nNeuro, nBin))
    for i in range(nNeuro):
        xi = tuning[i,:]
        dX = xi[None, :] - xi[:, None]
        vi = (1-np.cos(np.pi*dX))**(0.5*alpha-1)
        u[i,:] = np.sum(weight[:,None]*vi*kappa_mat[i,:]*np.sin(np.pi*dX)*P, axis = 0)
        # inside the sum:
        # [k,l] = weight[k]*(1-np.cos(np.pi*(x[l]_i-x[k]_i)))**(alpha/2-1)\
        #         *kappa_mat[i,l]*np.sin(np.pi*(x[l]_i - x[k]_i) )*P[k,l]
        # sum over k for fixed l
        u[i,:] += np.sum(weight[:,None]*vi*kappa_mat[i,:][:,None]*np.sin(np.pi*dX)*P.T, axis = 0)
        # inside the sum:
        # [k,l] = weight[k]*(1-np.cos(np.pi*(x[l]_i-x[k]_i)))**(alpha/2-1)\
        #         *kappa_mat[i,k]*np.sin(np.pi*(x[l]_i-x[k]_i))*P[l,k]
        # sum over k for fixed l
    u = np.pi*0.5*alpha*weight*u # u[i,k] = constant*weight[k]*u[i,k]
    # positive derivatives of the elastic term
    dl = np.zeros((nNeuro, nBin))
    if beta != 0:
        diffx = np.zeros((nNeuro, nBin))
        diffx[:, 0:-1] = np.diff(tuning, axis = 1) # x[k+1]_i - x[k]_i
        if elastic_term_periodic:
            diffx[:, -1] = tuning[:, 0] - tuning[:, -1]
        revdiffx =-np.roll(diffx, 1, axis = 1) # x[k-1]_i - x[k]_i
        diffP = np.sum(Lambda*(1-np.cos(np.pi*diffx))**(0.5*alpha), axis = 0)
        # diffP[k] = sum Lambda*(1-cos(pi*diffx[i,k]))**(alpha/2) from i=0 to nNeuro-1
        diffP = np.exp(-diffP)
        revdiffP = np.roll(diffP, 1) # np.exp(-revdiffP)
        Lambda_constant = (np.exp(Lambda)/ special.iv(0, Lambda)/2)**nNeuro #take power of nNeuro
        if elastic_term == 'exp':
            dl = (1-np.cos(np.pi*diffx))**(0.5*alpha-1)*np.sin(np.pi*diffx)*diffP
            dl += (1-np.cos(np.pi*revdiffx))**(0.5*alpha-1)*np.sin(np.pi*revdiffx)*revdiffP
            dl *= 0.5*alpha*np.pi*Lambda*Lambda_constant
        elif elastic_term == 'expweight':
            dl = weight*np.roll(weight,-1)*\
                (1-np.cos(np.pi*diffx))**(0.5*alpha-1)*np.sin(np.pi*diffx)*diffP
            dl += weight*np.roll(weight,1)*\
                (1-np.cos(np.pi*revdiffx))**(0.5*alpha-1)*np.sin(np.pi*revdiffx)*revdiffP
            dl *= 0.5*alpha*np.pi*Lambda*Lambda_constant
        elif elastic_term == 'rand':
            dl = np.random.vonmises(0, Lambda, size = (nNeuro, nBin))/np.pi # map to [-1, 1]
        if elastic_term in ['exp', 'expweight'] and (not elastic_term_periodic):
            # NOTE(review): endpoint columns are doubled for open curves —
            # presumably the boundary terms of the discrete gradient; confirm.
            dl[:, 0] = 2*dl[:, 0]
            dl[:, -1] = 2*dl[:, -1]
    # upper_bound, lower_bound = (-1, 1)
    tuningnew = tuning + eta*u + beta*dl
    tuningnew = tuningnew - 2*np.ceil((tuningnew-1)/2.0) # map to [-1, 1]^nNeuro
    return tuningnew
def conditional_probability_matrix_vonmises(tuning, kappa_mat, alpha = 2):
    """Pairwise conditional-probability matrix for the Von Mises model.

    Column l is normalized by prod_i exp(kappa_mat[i, l]) / (2 * I0(kappa_mat[i, l])).
    """
    n_bin = tuning.shape[1]
    exponent = np.zeros((n_bin, n_bin))
    for row, kappas in zip(tuning, kappa_mat):
        d = row[None, :] - row[:, None]
        exponent += kappas * (1 - np.cos(np.pi * d)) ** (0.5 * alpha)
    prob = np.exp(-exponent)
    # Per-bin Von Mises normalization constants, multiplied column-wise.
    norm = np.prod(np.exp(kappa_mat) / special.iv(0, kappa_mat) / 2, axis=0)
    return prob * norm
def evaluate_elastic_term_periodic(tuning, weight, alpha = 2, beta = 0, elastic_term = 'exp', Lambda=1,
                                   elastic_term_periodic = True):
    '''Evaluate the elastic term in the Von Mises Model (including beta).

    Returns beta times the elastic energy; 0 when beta == 0 or when
    elastic_term is not one of the energy-based options ('exp',
    'expweight') — e.g. 'rand' has no associated energy.
    '''
    nNeuro, nBin = tuning.shape
    if beta == 0:
        return 0
    diffx = np.zeros((nNeuro, nBin))
    diffx[:, 0:-1] = np.diff(tuning, axis = 1)  # x[k+1]_i - x[k]_i
    if elastic_term_periodic:
        diffx[:, -1] = tuning[:, 0] - tuning[:, -1]
    diffP = np.sum(Lambda*(1-np.cos(np.pi*diffx))**(0.5*alpha), axis = 0)
    # diffP[k] = sum Lambda*(1-cos(pi*diffx[i,k]))**(alpha/2) from i=0 to nNeuro-1
    diffP = np.exp(-diffP)
    Lambda_constant = (np.exp(Lambda)/ special.iv(0, Lambda)/2)**nNeuro  # take power of nNeuro
    # Bug fix: elastic_value was previously only assigned inside the two
    # recognized branches, so any other elastic_term (e.g. 'rand', which
    # tuning_update_inhomo_periodic supports) raised UnboundLocalError.
    # Initialize to 0, matching evaluate_elastic_term in the Gaussian model.
    elastic_value = 0
    if elastic_term == 'exp':
        elastic_value = np.sum(diffP)*Lambda_constant
    elif elastic_term == 'expweight':
        elastic_value = np.sum(weight*np.roll(weight,-1)*diffP)*Lambda_constant
    return elastic_value*beta
def tuning_update_poisson(tuning, weight, alpha = 2, eta = 1, beta = 0, Lambda = 1,
                          elastic_term = 'sum',# other options: 'exp', 'expweight', 'rand'
                          elastic_term_periodic = True, # use closed curve
                          upper_bound = 1, lower_bound = 0.01):
    '''Pseudo-Poisson Model (gaussian with variance=function value), no periodic boundary condition

    One explicit update step of the tuning curve: adds eta times the
    repulsive term and beta times the elastic term, then clips to
    [lower_bound, upper_bound].  lower_bound defaults to 0.01 (not 0)
    because the model divides by the tuning values.  Returns a new array;
    the input is not modified.
    '''
    # tuning.shape = (numNeuro, numBin)
    # weight: vector, sum =1, length = numBin
    # Lambda: number, std for the laplacian term
    # eta: number, coefficient of the repulsive term
    # alpha: number, exponent of \|x_k - x_l\| in the probability distribution
    # beta: number, coefficient of the laplacian term
    nNeuro, nBin = tuning.shape
    dS = np.zeros((nBin, nBin))
    for i in range(nNeuro):
        dX = tuning[i,:][None, :] - tuning[i,:][:, None] # dX[k, l]=tuning[i, l]-tuning[i, k]
        dS += dX**2/tuning[i,:][None, :] # (tuning[i, l]-tuning[i, k])^2/tuning[i, l]
    dS = np.sqrt(dS)
    P = np.exp(-0.5*dS**alpha)
    P_constants = np.sqrt(np.prod(tuning, axis = 0)) # product of tuning[i,l] over all i
    P /= P_constants[None, :] # P[k,l] = P[k,l]/constants[l]
    # negative derivatives
    u = np.zeros((nNeuro, nBin))
    F = dS**(alpha-2)*P
    F2 = (dS.T)**(alpha-2)*P.T
    for i in range(nNeuro):
        xi = tuning[i,:]
        dX = xi[None, :] - xi[:, None] # dX[k, l]=tuning[i, l]-tuning[i, k]
        temp = dX*F/xi[None,:] + dX*F2/xi[:,None] #dX[k,l]*F[k,l]/tuning[i,l]+dX[k,l]*F2[k,l]/tuning[i,k]
        temp += 1.0/alpha*P/xi[None,:] # P[k,l]/tuning[i,l] term
        temp -= 0.5*F*(dX**2)/(xi[None,:]**2) # 0.5*F[k,l]*dX[k,l]**2/(tuning[i,l]**2)
        u[i,:] = np.sum(weight[:,None]*temp, axis = 0)
        # inside the sum:
        # [k,l] = weight[k]*temp[k,l]
        # sum over k for fixed l
    u = weight*alpha*0.5*u # u[i,k] = weight[k]*u[i,k]*alpha*0.5
    # positive derivatives of the elastic term
    dl = np.zeros((nNeuro, nBin))
    if beta != 0:
        diffx = np.zeros((nNeuro, nBin))
        diffx[:, 0:-1] = np.diff(tuning, axis = 1) # x[k+1]_i - x[k]_i
        if elastic_term_periodic:
            diffx[:, -1] = tuning[:, 0] - tuning[:, -1]
        revdiffx =-np.roll(diffx, 1, axis = 1) # x[k-1]_i - x[k]_i
        diffS = np.sqrt(np.sum(diffx**2, axis = 0)) # sum diffx[i,k]**2 from i=1 to nNeuro
        revdiffS = np.roll(diffS, 1)
        if elastic_term == 'sum':
            diffpow = diffS**(alpha-2)
            revdiffpow = revdiffS**(alpha-2)
            dl = diffx*diffpow + revdiffx*revdiffpow
            dl *= 0.5*alpha
        elif elastic_term == 'exp':
            expdiffpow = (diffS**(alpha-2))*np.exp(-diffS**alpha/(2*Lambda**alpha))
            exprevdiffpow = (revdiffS**(alpha-2))*np.exp(-revdiffS**alpha/(2*Lambda**alpha))
            dl = diffx*expdiffpow + revdiffx*exprevdiffpow
            dl *= 0.5*alpha/Lambda**3
        elif elastic_term == 'expweight':
            expdiffpow = (diffS**(alpha-2))*np.exp(-diffS**alpha/(2*Lambda**alpha))
            exprevdiffpow = (revdiffS**(alpha-2))*np.exp(-revdiffS**alpha/(2*Lambda**alpha))
            dl = weight*np.roll(weight,-1)*diffx*expdiffpow + \
                weight*np.roll(weight,1)*revdiffx*exprevdiffpow
            dl *= 0.5*alpha/Lambda**3
        elif elastic_term == 'rand':
            dl = np.random.randn(nNeuro, nBin)
        if elastic_term in ['sum', 'exp', 'expweight'] and (not elastic_term_periodic):
            # NOTE(review): endpoint columns are doubled for open curves —
            # presumably the boundary terms of the discrete gradient; confirm.
            dl[:,0] = 2*dl[:,0]
            dl[:, -1] = 2*dl[:, -1]
    tuningnew = tuning + eta*u + beta*dl
    tuningnew[tuningnew > upper_bound] = upper_bound
    tuningnew[tuningnew < lower_bound] = lower_bound
    return tuningnew
def conditional_probability_matrix_poisson(tuning, alpha = 2):
    """Pairwise conditional-probability matrix for the pseudo-Poisson model
    (Gaussian whose variance equals the function value).

    Column l is normalized by sqrt(prod_i tuning[i, l]).
    """
    n_bin = tuning.shape[1]
    mahal_sq = np.zeros((n_bin, n_bin))
    for row in tuning:
        d = row[None, :] - row[:, None]
        mahal_sq += d ** 2 / row[None, :]  # variance of column l is row[l]
    dist = np.sqrt(mahal_sq)
    prob = np.exp(-0.5 * dist ** alpha)
    return prob / np.sqrt(np.prod(tuning, axis=0))[None, :]
def mutual_distance(tuning):
    """Euclidean distance matrix between the nBin points stored as columns of tuning."""
    n_bin = tuning.shape[1]
    sq = np.zeros((n_bin, n_bin))
    for row in tuning:
        sq += (row[None, :] - row[:, None]) ** 2
    return np.sqrt(sq)
| 42.841689
| 123
| 0.548131
| 2,475
| 16,237
| 3.525657
| 0.069495
| 0.035068
| 0.019253
| 0.017648
| 0.836695
| 0.826037
| 0.804492
| 0.782145
| 0.770342
| 0.751433
| 0
| 0.027174
| 0.265689
| 16,237
| 378
| 124
| 42.955026
| 0.704688
| 0.270801
| 0
| 0.779661
| 0
| 0
| 0.01178
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038136
| false
| 0
| 0.016949
| 0
| 0.101695
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3d90d7c98e3fb50149c77eb002567f870cf11977
| 122
|
py
|
Python
|
src/interpret.py
|
BotScutters/eye-of-the-needle
|
586d9c1e33763919b70382e67e4c7873bdbb05a8
|
[
"MIT"
] | 16
|
2019-04-08T22:09:51.000Z
|
2021-08-02T18:18:41.000Z
|
src/interpret.py
|
BotScutters/eye-of-the-needle
|
586d9c1e33763919b70382e67e4c7873bdbb05a8
|
[
"MIT"
] | 1
|
2019-11-19T06:27:37.000Z
|
2019-12-26T20:56:03.000Z
|
src/interpret.py
|
BotScutters/eye-of-the-needle
|
586d9c1e33763919b70382e67e4c7873bdbb05a8
|
[
"MIT"
] | 8
|
2019-04-08T23:01:39.000Z
|
2021-08-02T18:18:43.000Z
|
def run():
    """Print progress banners for the interpret stage."""
    banners = (
        "iNterpret: calculating performance metrics...",
        "iNterpret: generating final reports...",
    )
    for banner in banners:
        print(banner)
| 30.5
| 58
| 0.696721
| 12
| 122
| 7.083333
| 0.833333
| 0.329412
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147541
| 122
| 4
| 59
| 30.5
| 0.817308
| 0
| 0
| 0
| 0
| 0
| 0.680328
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0
| 0
| 0.333333
| 0.666667
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
3d9547048c2c8c5c925bf1dddc2f335dd2f20612
| 7,411
|
py
|
Python
|
tests/test_utils.py
|
JunShern/lm-evaluation-harness
|
84aa15c6e4cb65adf39c2dccf91a799cc7e6440a
|
[
"MIT"
] | 203
|
2021-01-08T16:39:09.000Z
|
2022-03-31T06:03:16.000Z
|
tests/test_utils.py
|
JunShern/lm-evaluation-harness
|
84aa15c6e4cb65adf39c2dccf91a799cc7e6440a
|
[
"MIT"
] | 183
|
2020-12-27T03:41:08.000Z
|
2022-03-19T21:56:53.000Z
|
tests/test_utils.py
|
JunShern/lm-evaluation-harness
|
84aa15c6e4cb65adf39c2dccf91a799cc7e6440a
|
[
"MIT"
] | 73
|
2021-01-05T22:37:01.000Z
|
2022-03-29T10:14:53.000Z
|
from lm_eval.utils import get_rolling_token_windows, make_disjoint_window
# noinspection DuplicatedCode
def test_get_rolling_token_windows_v1():
    """34 tokens, max_seq_len=10, context_len=1: full stride-10 windows, short tail."""
    expected = [
        ([-100, 0, 1, 2, 3, 4, 5, 6, 7, 8], [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]),
        ([9, 10, 11, 12, 13, 14, 15, 16, 17, 18], [10, 11, 12, 13, 14, 15, 16, 17, 18, 19]),
        ([19, 20, 21, 22, 23, 24, 25, 26, 27, 28], [20, 21, 22, 23, 24, 25, 26, 27, 28, 29]),
        ([23, 24, 25, 26, 27, 28, 29, 30, 31, 32], [30, 31, 32, 33]),
    ]
    tokens = list(range(34))
    windows = [
        (context, preds)
        for context, preds in get_rolling_token_windows(
            token_list=tokens,
            prefix_token=-100,
            max_seq_len=10,
            context_len=1,
        )
    ]
    # Every token must be predicted exactly once across all windows.
    assert sum(len(preds) for _, preds in windows) == len(tokens)
    assert expected == windows
# noinspection DuplicatedCode
def test_get_rolling_token_windows_v2():
    """34 tokens, max_seq_len=10, context_len=8: windows slide by 3 after the first."""
    # First window: prefix token plus tokens 0-8, predicting tokens 0-9.
    expected = [([-100] + list(range(9)), list(range(10)))]
    # Window i (i >= 1) has context [3i-1, 3i+9) and predicts the last 3 tokens.
    expected += [
        (list(range(3 * i - 1, 3 * i + 9)), [3 * i + 7, 3 * i + 8, 3 * i + 9])
        for i in range(1, 9)
    ]
    tokens = list(range(34))
    windows = [
        (context, preds)
        for context, preds in get_rolling_token_windows(
            token_list=tokens,
            prefix_token=-100,
            max_seq_len=10,
            context_len=8,
        )
    ]
    assert sum(len(preds) for _, preds in windows) == len(tokens)
    assert expected == windows
# noinspection DuplicatedCode
def test_get_rolling_token_windows_v3():
    """34 tokens, context_len == max_seq_len == 10: one token predicted per window."""
    # First window: prefix token plus tokens 0-8, predicting tokens 0-9.
    expected = [([-100] + list(range(9)), list(range(10)))]
    # Then each token t >= 10 gets its own window with the 10 preceding tokens.
    expected += [(list(range(t - 10, t)), [t]) for t in range(10, 34)]
    tokens = list(range(34))
    windows = [
        (context, preds)
        for context, preds in get_rolling_token_windows(
            token_list=tokens,
            prefix_token=-100,
            max_seq_len=10,
            context_len=10,
        )
    ]
    assert sum(len(preds) for _, preds in windows) == len(tokens)
    assert expected == windows
# noinspection DuplicatedCode
def test_get_rolling_token_windows_v4():
    """Same as v3 but with 30 tokens (sequence length not a multiple of stride)."""
    # First window: prefix token plus tokens 0-8, predicting tokens 0-9.
    expected = [([-100] + list(range(9)), list(range(10)))]
    # Then each token t >= 10 gets its own window with the 10 preceding tokens.
    expected += [(list(range(t - 10, t)), [t]) for t in range(10, 30)]
    tokens = list(range(30))
    windows = [
        (context, preds)
        for context, preds in get_rolling_token_windows(
            token_list=tokens,
            prefix_token=-100,
            max_seq_len=10,
            context_len=10,
        )
    ]
    assert sum(len(preds) for _, preds in windows) == len(tokens)
    assert expected == windows
# noinspection DuplicatedCode
def test_get_rolling_token_windows_v5():
    """30 tokens, max_seq_len=10, context_len=1: exactly three full windows."""
    expected = [
        ([-100, 0, 1, 2, 3, 4, 5, 6, 7, 8], [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]),
        ([9, 10, 11, 12, 13, 14, 15, 16, 17, 18], [10, 11, 12, 13, 14, 15, 16, 17, 18, 19]),
        ([19, 20, 21, 22, 23, 24, 25, 26, 27, 28], [20, 21, 22, 23, 24, 25, 26, 27, 28, 29]),
    ]
    tokens = list(range(30))
    windows = [
        (context, preds)
        for context, preds in get_rolling_token_windows(
            token_list=tokens,
            prefix_token=-100,
            max_seq_len=10,
            context_len=1,
        )
    ]
    assert sum(len(preds) for _, preds in windows) == len(tokens)
    assert expected == windows
# noinspection DuplicatedCode
def test_get_rolling_token_windows_v6():
    """Tiny windows (max_seq_len=2) over 9 tokens, including a 1-token tail."""
    expected = [
        ([-100, 0], [0, 1]),
        ([1, 2], [2, 3]),
        ([3, 4], [4, 5]),
        ([5, 6], [6, 7]),
        ([6, 7], [8]),
    ]
    tokens = list(range(9))
    windows = [
        (context, preds)
        for context, preds in get_rolling_token_windows(
            token_list=tokens,
            prefix_token=-100,
            max_seq_len=2,
            context_len=1,
        )
    ]
    assert sum(len(preds) for _, preds in windows) == len(tokens)
    assert expected == windows
def test_get_rolling_token_windows_empty():
    """An empty token list must produce no windows at all."""
    windows = list(get_rolling_token_windows(
        token_list=[],
        prefix_token=-100,
        max_seq_len=2,
        context_len=1,
    ))
    assert len(windows) == 0
def test_make_disjoint_window():
    """make_disjoint_window trims the context so it stops before the first prediction."""
    cases = [
        (([1, 2, 3, 4, 5], [2, 3, 4, 5, 6]), ([1], [2, 3, 4, 5, 6])),
        (([1, 2, 3, 4, 5], [4, 5, 6]), ([1, 2, 3], [4, 5, 6])),
    ]
    for given, wanted in cases:
        assert make_disjoint_window(given) == wanted
| 35.801932
| 93
| 0.483605
| 1,262
| 7,411
| 2.7187
| 0.062599
| 0.016905
| 0.023608
| 0.02798
| 0.939376
| 0.938793
| 0.930341
| 0.918683
| 0.902652
| 0.880501
| 0
| 0.284384
| 0.304412
| 7,411
| 207
| 94
| 35.801932
| 0.381183
| 0.022534
| 0
| 0.733696
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081522
| 1
| 0.043478
| false
| 0
| 0.005435
| 0
| 0.048913
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
3db95bb88081a6a9b2d7cbfab501c6ed02c2ec9a
| 101
|
py
|
Python
|
test/run/t158.py
|
timmartin/skulpt
|
2e3a3fbbaccc12baa29094a717ceec491a8a6750
|
[
"MIT"
] | 2,671
|
2015-01-03T08:23:25.000Z
|
2022-03-31T06:15:48.000Z
|
test/run/t158.py
|
timmartin/skulpt
|
2e3a3fbbaccc12baa29094a717ceec491a8a6750
|
[
"MIT"
] | 972
|
2015-01-05T08:11:00.000Z
|
2022-03-29T13:47:15.000Z
|
test/run/t158.py
|
timmartin/skulpt
|
2e3a3fbbaccc12baa29094a717ceec491a8a6750
|
[
"MIT"
] | 845
|
2015-01-03T19:53:36.000Z
|
2022-03-29T18:34:22.000Z
|
# Skulpt runtime regression test: type() of literals compares equal to the
# builtin type objects. Python 2 print-statement syntax — do not port to py3.
print type(1) == int
print type(2**10) == int
# NOTE(review): the `long` case below is disabled — presumably 2**1024/long is
# unsupported or differs in Skulpt; confirm before re-enabling.
# print type(2**1024) == long
print type("wee") == str
| 20.2
| 29
| 0.60396
| 18
| 101
| 3.388889
| 0.555556
| 0.590164
| 0.393443
| 0.42623
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107143
| 0.168317
| 101
| 4
| 30
| 25.25
| 0.619048
| 0.267327
| 0
| 0
| 0
| 0
| 0.041667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
3dc06c3f7eb09c86ca5ae80333bd6d006b058ba9
| 10,609
|
py
|
Python
|
tests/model/test_guest_media.py
|
bcurnow/rfid-security-svc
|
d3806cb74d3d0cc2623ea425230dc8781ba4d8b4
|
[
"Apache-2.0"
] | null | null | null |
tests/model/test_guest_media.py
|
bcurnow/rfid-security-svc
|
d3806cb74d3d0cc2623ea425230dc8781ba4d8b4
|
[
"Apache-2.0"
] | null | null | null |
tests/model/test_guest_media.py
|
bcurnow/rfid-security-svc
|
d3806cb74d3d0cc2623ea425230dc8781ba4d8b4
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from unittest.mock import patch
import rfidsecuritysvc.model.guest_media as model
from rfidsecuritysvc.exception import GuestNotFoundError, MediaNotFoundError, SoundNotFoundError
from rfidsecuritysvc.model.color import Color
from rfidsecuritysvc.model.guest import Guest
from rfidsecuritysvc.model.guest_media import GuestMedia
from rfidsecuritysvc.model.media import Media
from rfidsecuritysvc.model.sound import Sound
def test_GuestMedia(assert_model, open_door_guest, open_door_media, default_sound, default_color):
    """A GuestMedia built through the _model helper equals one built directly."""
    built = _model(1, open_door_guest, open_door_media, default_sound, default_color)
    direct = GuestMedia(1, open_door_guest, open_door_media, default_sound, default_color)
    assert_model(built, direct)


def test_GuestMedia_to_json(open_door_guest, open_door_media, default_sound, default_color):
    """to_json emits the id verbatim and nests each child object's own to_json."""
    gm = GuestMedia(1, open_door_guest, open_door_media, default_sound, default_color)
    json = gm.to_json()
    assert json['id'] == 1
    assert json['guest'] == open_door_guest.to_json()
    assert json['media'] == open_door_media.to_json()
    assert json['sound'] == default_sound.to_json()
    assert json['color'] == default_color.to_json()
@patch('rfidsecuritysvc.model.guest_media.table')
def test_get(table):
    """model.get delegates to table.get and maps the returned row."""
    table.get.return_value = _default().test_to_row()
    assert model.get(1) == _default()
    table.get.assert_called_once_with(1)
@patch('rfidsecuritysvc.model.guest_media.table')
def test_get_by_media(table):
    """model.get_by_media delegates to table.get_by_media with the media id."""
    table.get_by_media.return_value = _default().test_to_row()
    assert model.get_by_media('test') == _default()
    table.get_by_media.assert_called_once_with('test')
@patch('rfidsecuritysvc.model.guest_media.table')
def test_get_notfound(table):
    """A missing row maps to None rather than raising."""
    table.get.return_value = None
    assert model.get(1) is None
    table.get.assert_called_once_with(1)
@patch('rfidsecuritysvc.model.guest_media.table')
def test_list(table):
    """model.list maps every row returned by table.list."""
    table.list.return_value = [_default().test_to_row(), _default(2).test_to_row()]
    models = model.list()
    table.list.assert_called_once()
    assert models == [_default(), _default(2)]


@patch('rfidsecuritysvc.model.guest_media.table')
def test_list_with_guest_id(table):
    """A guest id passed to model.list is forwarded to table.list."""
    table.list.return_value = [_default().test_to_row(), _default(2).test_to_row()]
    models = model.list(1)
    table.list.assert_called_once_with(1)
    assert models == [_default(), _default(2)]


@patch('rfidsecuritysvc.model.guest_media.table')
def test_list_noresults(table):
    """An empty table listing maps to an empty model list."""
    table.list.return_value = []
    models = model.list()
    table.list.assert_called_once()
    assert models == []
# NOTE: @patch decorators apply bottom-up, so the bottom patch (table) binds to
# the first mock parameter.
@patch('rfidsecuritysvc.model.guest_media.soundModel')
@patch('rfidsecuritysvc.model.guest_media.media')
@patch('rfidsecuritysvc.model.guest_media.guest')
@patch('rfidsecuritysvc.model.guest_media.table')
def test_create(table, guest, media, sound, default_sound):
    """create validates guest, media and sound exist, then inserts the row."""
    guest.get.return_value = Guest(1, 'first_name', 'last_name')
    media.get.return_value = Media('test', 'test')
    sound.get.return_value = default_sound
    table.create.return_value = None
    assert model.create(1, 'test', default_sound.id, 0xABCDEF) is None
    guest.get.assert_called_once_with(1)
    media.get.assert_called_once_with('test')
    sound.get.assert_called_once_with(default_sound.id)
    table.create.assert_called_once_with(1, 'test', default_sound.id, 0xABCDEF)
@patch('rfidsecuritysvc.model.guest_media.soundModel')
@patch('rfidsecuritysvc.model.guest_media.media')
@patch('rfidsecuritysvc.model.guest_media.guest')
@patch('rfidsecuritysvc.model.guest_media.table')
def test_create_no_prefs(table, guest, media, sound):
    """When sound and color are None, the sound lookup is skipped entirely."""
    guest.get.return_value = Guest(1, 'first_name', 'last_name')
    media.get.return_value = Media('test', 'test')
    table.create.return_value = None
    assert model.create(1, 'test', None, None) is None
    guest.get.assert_called_once_with(1)
    media.get.assert_called_once_with('test')
    sound.get.assert_not_called()
    table.create.assert_called_once_with(1, 'test', None, None)
@patch('rfidsecuritysvc.model.guest_media.soundModel')
@patch('rfidsecuritysvc.model.guest_media.media')
@patch('rfidsecuritysvc.model.guest_media.guest')
@patch('rfidsecuritysvc.model.guest_media.table')
def test_create_no_guest(table, guest, media, sound):
    """A missing guest raises GuestNotFoundError before any other lookup runs."""
    guest.get.return_value = None
    with pytest.raises(GuestNotFoundError):
        model.create(1, 'test', None, None)
    guest.get.assert_called_once_with(1)
    media.get.assert_not_called()
    sound.get.assert_not_called()
    table.create.assert_not_called()
@patch('rfidsecuritysvc.model.guest_media.soundModel')
@patch('rfidsecuritysvc.model.guest_media.media')
@patch('rfidsecuritysvc.model.guest_media.guest')
@patch('rfidsecuritysvc.model.guest_media.table')
def test_create_no_media(table, guest, media, sound):
    """A missing media record raises MediaNotFoundError and nothing is inserted."""
    guest.get.return_value = Guest(1, 'first_name', 'last_name')
    media.get.return_value = None
    with pytest.raises(MediaNotFoundError):
        model.create(1, 'test', None, None)
    guest.get.assert_called_once_with(1)
    media.get.assert_called_once_with('test')
    sound.get.assert_not_called()
    table.create.assert_not_called()
@patch('rfidsecuritysvc.model.guest_media.soundModel')
@patch('rfidsecuritysvc.model.guest_media.media')
@patch('rfidsecuritysvc.model.guest_media.guest')
@patch('rfidsecuritysvc.model.guest_media.table')
def test_create_no_sound(table, guest, media, sound, default_sound):
    """A missing sound record raises SoundNotFoundError and nothing is inserted."""
    guest.get.return_value = Guest(1, 'first_name', 'last_name')
    media.get.return_value = Media('test', 'test')
    sound.get.return_value = None
    with pytest.raises(SoundNotFoundError):
        # Fixed: the original appended `is None` here; the comparison result was
        # discarded (dead code) and the call is expected to raise anyway.
        model.create(1, 'test', default_sound.id, None)
    guest.get.assert_called_once_with(1)
    media.get.assert_called_once_with('test')
    sound.get.assert_called_once_with(default_sound.id)
    table.create.assert_not_called()
@patch('rfidsecuritysvc.model.guest_media.table')
def test_delete(table):
    """delete delegates to table.delete and returns the affected-row count."""
    table.delete.return_value = 1
    assert model.delete(1) == 1
    table.delete.assert_called_with(1)
@patch('rfidsecuritysvc.model.guest_media.soundModel')
@patch('rfidsecuritysvc.model.guest_media.media')
@patch('rfidsecuritysvc.model.guest_media.guest')
@patch('rfidsecuritysvc.model.guest_media.table')
def test_update(table, guest, media, sound, default_sound):
    """update validates guest, media and sound exist, then updates the row."""
    guest.get.return_value = Guest(1, 'first_name', 'last_name')
    media.get.return_value = Media('test', 'test')
    sound.get.return_value = default_sound
    table.update.return_value = None
    assert model.update(1, 1, 'test', default_sound.id, 0xABCDEF) is None
    guest.get.assert_called_once_with(1)
    media.get.assert_called_once_with('test')
    sound.get.assert_called_once_with(default_sound.id)
    table.update.assert_called_once_with(1, 1, 'test', default_sound.id, 0xABCDEF)
@patch('rfidsecuritysvc.model.guest_media.soundModel')
@patch('rfidsecuritysvc.model.guest_media.media')
@patch('rfidsecuritysvc.model.guest_media.guest')
@patch('rfidsecuritysvc.model.guest_media.table')
def test_update_no_sound(table, guest, media, sound):
    """When sound_id is None, the sound lookup is skipped."""
    guest.get.return_value = Guest(1, 'first_name', 'last_name')
    media.get.return_value = Media('test', 'test')
    table.update.return_value = None
    assert model.update(1, 1, 'test', None, 0xABCDEF) is None
    guest.get.assert_called_once_with(1)
    media.get.assert_called_once_with('test')
    sound.get.assert_not_called()
    table.update.assert_called_once_with(1, 1, 'test', None, 0xABCDEF)
@patch('rfidsecuritysvc.model.guest_media.soundModel')
@patch('rfidsecuritysvc.model.guest_media.media')
@patch('rfidsecuritysvc.model.guest_media.guest')
@patch('rfidsecuritysvc.model.guest_media.table')
def test_update_SoundNotFoundError(table, guest, media, sound, default_sound):
    """A missing sound raises SoundNotFoundError and the update is not applied."""
    guest.get.return_value = Guest(1, 'first_name', 'last_name')
    media.get.return_value = Media('test', 'test')
    sound.get.return_value = None
    with pytest.raises(SoundNotFoundError):
        model.update(1, 1, 'test', default_sound.id, 0xABCDEF)
    guest.get.assert_called_once_with(1)
    media.get.assert_called_once_with('test')
    sound.get.assert_called_once_with(default_sound.id)
    table.update.assert_not_called()
@patch('rfidsecuritysvc.model.guest_media.soundModel')
@patch('rfidsecuritysvc.model.guest_media.media')
@patch('rfidsecuritysvc.model.guest_media.guest')
@patch('rfidsecuritysvc.model.guest_media.table')
def test_update_MediaNotFoundError(table, guest, media, sound, default_sound):
    """A missing media record raises MediaNotFoundError; sound is never checked."""
    guest.get.return_value = Guest(1, 'first_name', 'last_name')
    media.get.return_value = None
    with pytest.raises(MediaNotFoundError):
        model.update(1, 1, 'test', default_sound.id, 0xABCDEF)
    guest.get.assert_called_once_with(1)
    media.get.assert_called_once_with('test')
    sound.get.assert_not_called()
    table.update.assert_not_called()
@patch('rfidsecuritysvc.model.guest_media.soundModel')
@patch('rfidsecuritysvc.model.guest_media.media')
@patch('rfidsecuritysvc.model.guest_media.guest')
@patch('rfidsecuritysvc.model.guest_media.table')
def test_update_GuestNotFoundError(table, guest, media, sound, default_sound):
    """A missing guest raises GuestNotFoundError before any other lookup runs."""
    guest.get.return_value = None
    with pytest.raises(GuestNotFoundError):
        model.update(1, 1, 'test', default_sound.id, 0xABCDEF)
    guest.get.assert_called_once_with(1)
    media.get.assert_not_called()
    sound.get.assert_not_called()
    table.update.assert_not_called()
# The next four tests reach into the module-private row mapper model.__model
# (no name mangling here — mangling only happens inside class bodies) to check
# that each nullable column maps to None on the built object.
def test__model_no_guest_color(creatable_guest_media):
    """A NULL guest_color column yields guest.color is None."""
    row = creatable_guest_media.test_to_row()
    row['guest_color'] = None
    gm = model.__model(row)
    assert gm.guest.color is None
def test__model_no_guest_sound(creatable_guest_media):
    """A NULL guest_sound column yields guest.sound is None."""
    row = creatable_guest_media.test_to_row()
    row['guest_sound'] = None
    gm = model.__model(row)
    assert gm.guest.sound is None
def test__model_no_color(creatable_guest_media):
    """A NULL color column yields color is None."""
    row = creatable_guest_media.test_to_row()
    row['color'] = None
    gm = model.__model(row)
    assert gm.color is None
def test__model_no_sound(creatable_guest_media):
    """A NULL sound column yields sound is None."""
    row = creatable_guest_media.test_to_row()
    row['sound'] = None
    gm = model.__model(row)
    assert gm.sound is None
def _default(index=1):
    """Build the canonical GuestMedia fixture; index varies guest/media fields."""
    sound = Sound(1, 'test.wav', '2021-09-25 23:13:25')
    color = Color(0xABCDEF)
    guest = Guest(index, f'test guest_first_name {index}', f'test guest_last_name {index}', sound, color)
    media = Media(f'test media_id {index}', f'test media_name {index}', f'test media_desc {index}')
    return _model(index, guest, media, sound, color)
def _model(id, guest, media, sound=None, color=None):
    """Construct a GuestMedia from its parts; sound and color are optional."""
    return GuestMedia(id, guest, media, sound, color)
| 38.438406
| 98
| 0.751343
| 1,484
| 10,609
| 5.088949
| 0.047844
| 0.091367
| 0.165519
| 0.19465
| 0.84706
| 0.825477
| 0.82071
| 0.810117
| 0.783898
| 0.754237
| 0
| 0.0089
| 0.120935
| 10,609
| 275
| 99
| 38.578182
| 0.800879
| 0
| 0
| 0.636771
| 0
| 0
| 0.224715
| 0.177491
| 0
| 0
| 0.007541
| 0
| 0.309417
| 1
| 0.112108
| false
| 0
| 0.040359
| 0.004484
| 0.161435
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9a7235276f360bcb1581144c89bd0544c6a44757
| 161
|
py
|
Python
|
utils/lists.py
|
EladSharony/Mechanics
|
078f97bea84114fc1db6fe9700b92b96b18a0d5e
|
[
"MIT"
] | 24
|
2021-02-23T13:53:14.000Z
|
2022-03-29T16:40:56.000Z
|
utils/lists.py
|
EladSharony/Mechanics
|
078f97bea84114fc1db6fe9700b92b96b18a0d5e
|
[
"MIT"
] | 2
|
2021-04-23T12:30:32.000Z
|
2022-03-31T10:51:12.000Z
|
utils/lists.py
|
EladSharony/Mechanics
|
078f97bea84114fc1db6fe9700b92b96b18a0d5e
|
[
"MIT"
] | 12
|
2021-04-11T20:44:03.000Z
|
2022-03-30T19:23:58.000Z
|
def list_of_zeros(length: int):
    """Return a new list containing `length` zeros."""
    return [0 for _ in range(length)]
def list_of_list_of_zeros(rows: int, cols: int):
    """Return a rows x cols grid of zeros; every row is an independent list."""
    # Inlines the list_of_zeros helper — each iteration still builds a fresh row.
    return [[0] * cols for _ in range(rows)]
| 23
| 53
| 0.708075
| 28
| 161
| 3.75
| 0.464286
| 0.228571
| 0.314286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007519
| 0.173913
| 161
| 6
| 54
| 26.833333
| 0.781955
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
9a939618b5f16cb74d6633892169239c1530a35d
| 316
|
py
|
Python
|
src/api/camera/picture.py
|
ScottDay/DFN-Maintenance-GUI-Backend
|
bfb05c75747fa9c334224b99609baef7321860a4
|
[
"MIT"
] | 2
|
2017-03-31T00:57:35.000Z
|
2017-08-04T10:38:28.000Z
|
src/api/camera/picture.py
|
CPedersen3245/Desert-Fireball-Maintainence-GUI
|
bfb05c75747fa9c334224b99609baef7321860a4
|
[
"MIT"
] | 10
|
2017-03-29T04:13:14.000Z
|
2017-08-14T06:14:52.000Z
|
src/api/camera/picture.py
|
ScottDay/DFN-Maintenance-GUI-Backend
|
bfb05c75747fa9c334224b99609baef7321860a4
|
[
"MIT"
] | 4
|
2017-12-23T03:16:00.000Z
|
2018-06-20T07:15:50.000Z
|
import src.wrappers as wrappers
@wrappers.jwt
@wrappers.endpoint
@wrappers.stats
@wrappers.logger('TODO: Implement endpoint')
@wrappers.injector
def download(handler):
    """Camera-picture download endpoint — unimplemented stub.

    The wrapper chain (JWT auth, endpoint plumbing, stats, logging,
    injection) is already wired; the body intentionally does nothing yet.
    """
    pass
@wrappers.jwt
@wrappers.endpoint
@wrappers.stats
@wrappers.logger('TODO: Implement endpoint')
@wrappers.injector
def find(handler):
    """Camera-picture lookup endpoint — unimplemented stub.

    Same wrapper chain as download; the body intentionally does nothing yet.
    """
    pass
| 15.8
| 44
| 0.787975
| 39
| 316
| 6.384615
| 0.410256
| 0.257028
| 0.15261
| 0.216867
| 0.75502
| 0.75502
| 0.75502
| 0.75502
| 0.75502
| 0.75502
| 0
| 0
| 0.094937
| 316
| 19
| 45
| 16.631579
| 0.870629
| 0
| 0
| 0.8
| 0
| 0
| 0.151899
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.133333
| false
| 0.133333
| 0.066667
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
9ad06c4bb40f53eaca07f4867da39ef8dcc72b41
| 121
|
py
|
Python
|
montepython/likelihoods/extended_plik_Planck_lowl_TT/__init__.py
|
ivandebono/montepython_public_3.2dev_Python3
|
16771c3d37faaa3f80b171c01d78da56a75aa3d9
|
[
"MIT"
] | null | null | null |
montepython/likelihoods/extended_plik_Planck_lowl_TT/__init__.py
|
ivandebono/montepython_public_3.2dev_Python3
|
16771c3d37faaa3f80b171c01d78da56a75aa3d9
|
[
"MIT"
] | null | null | null |
montepython/likelihoods/extended_plik_Planck_lowl_TT/__init__.py
|
ivandebono/montepython_public_3.2dev_Python3
|
16771c3d37faaa3f80b171c01d78da56a75aa3d9
|
[
"MIT"
] | null | null | null |
from montepython.likelihood_class import Likelihood_clik
class extended_plik_Planck_lowl_TT(Likelihood_clik):
    """Planck low-l TT likelihood exposed under this name.

    All behavior is inherited from Likelihood_clik; the subclass adds nothing
    here — presumably montepython resolves the likelihood's data files from
    the class/folder name (TODO confirm against Likelihood_clik).
    """
    pass
| 20.166667
| 56
| 0.859504
| 16
| 121
| 6.0625
| 0.75
| 0.28866
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107438
| 121
| 5
| 57
| 24.2
| 0.898148
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
9afdc4c833bc248918013ec47848b1b410c62331
| 5,661
|
py
|
Python
|
Repository_files/TLCS/models/create_plots.py
|
kraken24/mhp_hackathon_sustainable_traffic_light_challenge
|
0b4c61968d54c7e8faceb07a2c78c70570a2e162
|
[
"CC0-1.0"
] | null | null | null |
Repository_files/TLCS/models/create_plots.py
|
kraken24/mhp_hackathon_sustainable_traffic_light_challenge
|
0b4c61968d54c7e8faceb07a2c78c70570a2e162
|
[
"CC0-1.0"
] | null | null | null |
Repository_files/TLCS/models/create_plots.py
|
kraken24/mhp_hackathon_sustainable_traffic_light_challenge
|
0b4c61968d54c7e8faceb07a2c78c70570a2e162
|
[
"CC0-1.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Sat Mar 5 02:12:12 2022
@author: Kraken
Project: MHP Hackathon
"""
import os
import pandas as pd
import matplotlib.pyplot as plt
plt.rcParams.update({'font.size': 16})

# RL-agent run directory and conventional-traffic-light baseline directory.
WORKING_DIR = "model_14"
WORKING_DIR2 = "model_12"
# "model_8": dqn with fixed weights
# "model_4": dqn
MVG_AVG_WINDOW = 5


def _read_series(model_dir, filename, limit=None):
    """Read one float per line from model_dir/filename, optionally truncated to `limit`."""
    with open(os.path.join(model_dir, filename), "r") as txtfile:
        values = [float(line.rstrip("\n")) for line in txtfile.readlines()]
    return values if limit is None else values[:limit]


def _plot_combined(filename, ylabel, legend_loc, rl_first):
    """Plot RL-agent vs. baseline curves for one metric and save *_combined.png.

    rl_first controls draw (and therefore legend-entry) order, preserving the
    per-metric ordering of the original script. The RL series is truncated to
    250 episodes as before; output goes to the current directory.
    """
    rl_data = _read_series(WORKING_DIR, filename, limit=250)
    ctl_data = _read_series(WORKING_DIR2, filename)
    plt.figure(figsize=(12, 8))
    curves = [(rl_data, "orange", "RL Agent"),
              (ctl_data, "blue", "Conventional Traffic Lights")]
    if not rl_first:
        curves.reverse()
    for series, color, label in curves:
        plt.plot(series, color, label=label)
    plt.xlabel("# Episodes")
    plt.ylabel(ylabel)
    plt.title("Conventional Traffic Lights & RL Optimized Smart Traffic Lights")
    plt.grid()
    plt.legend(loc=legend_loc)
    plt.savefig(filename.replace("_data.txt", "_combined.png"))


def _plot_training(filename, ylabel, title=None):
    """Plot one RL training curve plus its moving average and save *_new.png.

    When title is None the title reports the percentage decrease between the
    first and last moving-average values. Output goes inside WORKING_DIR.
    """
    data = _read_series(WORKING_DIR, filename)
    data_series = pd.Series(data).rolling(MVG_AVG_WINDOW).mean().tolist()
    first_value = data_series[MVG_AVG_WINDOW - 1]
    last_value = data_series[-1]
    perc_decrease = (first_value - last_value) / first_value * 100
    plt.figure(figsize=(12, 8))
    plt.plot(data)
    plt.plot(data_series, "r")
    plt.xlabel("# Episodes")
    plt.ylabel(ylabel)
    if title is None:
        title = f"Decrease: {first_value:.2f} -> {last_value:.2f} = {perc_decrease:.2f}%"
    plt.title(title)
    plt.savefig(os.path.join(WORKING_DIR, filename.replace("_data.txt", "_new.png")))


# Combined RL-vs-baseline comparison plots. The original script triplicated
# this whole section (and misleadingly reused the name QUEUE for the delay and
# reward files); the helper keeps one copy with the same per-metric settings.
_plot_combined("plot_queue_data.txt", "Average queue length (vehicles)",
               "upper right", rl_first=False)
_plot_combined("plot_delay_data.txt", "Cumulative Delay (s)",
               "upper right", rl_first=True)
_plot_combined("plot_reward_data.txt", "Cumulative Negative Reward",
               "best", rl_first=True)

# Per-metric training curves for the RL run.
_plot_training("plot_queue_data.txt", "Average queue length (vehicles)")
_plot_training("plot_delay_data.txt", "Cumulative Delay (s) / 1000 vehicles")
_plot_training("plot_reward_data.txt", "Cumulative negative reward",
               title="Reward Maximization by RL Agent")
| 35.161491
| 84
| 0.585409
| 729
| 5,661
| 4.414266
| 0.156379
| 0.026103
| 0.03729
| 0.063393
| 0.878496
| 0.85115
| 0.828465
| 0.819142
| 0.794904
| 0.757924
| 0
| 0.018182
| 0.106165
| 5,661
| 160
| 85
| 35.38125
| 0.617787
| 0.217099
| 0
| 0.803922
| 0
| 0
| 0.238063
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.029412
| 0
| 0.029412
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b106562f12616be2afaea4727005a92a5c9cbdc9
| 35,726
|
py
|
Python
|
resnet_chestxray/model.py
|
britneyting/resnet_chestxray
|
7dcbb76883578d79ae31554c003c7dcfe93323de
|
[
"MIT"
] | 1
|
2021-12-03T13:08:46.000Z
|
2021-12-03T13:08:46.000Z
|
resnet_chestxray/model.py
|
britneyting/resnet_chestxray
|
7dcbb76883578d79ae31554c003c7dcfe93323de
|
[
"MIT"
] | null | null | null |
resnet_chestxray/model.py
|
britneyting/resnet_chestxray
|
7dcbb76883578d79ae31554c003c7dcfe93323de
|
[
"MIT"
] | 1
|
2021-07-01T19:33:11.000Z
|
2021-07-01T19:33:11.000Z
|
'''
Author: Ruizhi Liao
Model script to define and instantiate
residual network models
'''
import csv
import os
import numpy as np
from math import floor, ceil
import scipy.ndimage as ndimage
import torch
import torchvision
import torchvision.transforms as transforms
from torch.utils.tensorboard import SummaryWriter
import torch.nn as nn
import torch.nn.functional as F
def conv3x3(in_planes, out_planes, stride=1, groups=1, dilation=1):
    """Build a bias-free 3x3 convolution whose padding equals the dilation."""
    layer = nn.Conv2d(
        in_planes,
        out_planes,
        kernel_size=3,
        stride=stride,
        padding=dilation,
        groups=groups,
        bias=False,
        dilation=dilation,
    )
    return layer
def conv1x1(in_planes, out_planes, stride=1):
    """Build a bias-free 1x1 (pointwise) convolution."""
    layer = nn.Conv2d(
        in_planes,
        out_planes,
        kernel_size=1,
        stride=stride,
        bias=False,
    )
    return layer
class BasicBlock(nn.Module):
    """Standard two-convolution residual block (3x3 -> 3x3) with identity shortcut."""

    __constants__ = ['downsample']

    def __init__(self, inplanes, planes, stride=1,
                 downsample=None,
                 norm_layer=nn.BatchNorm2d):
        super(BasicBlock, self).__init__()
        # Only the first conv carries the stride; the second is always stride 1.
        self.conv1 = conv3x3(inplanes, planes, stride)
        self.bn1 = norm_layer(planes)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = conv3x3(planes, planes)
        self.bn2 = norm_layer(planes)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        """conv-bn-relu-conv-bn, add the (possibly downsampled) input, final relu."""
        shortcut = x if self.downsample is None else self.downsample(x)
        y = self.relu(self.bn1(self.conv1(x)))
        y = self.bn2(self.conv2(y))
        y += shortcut
        return self.relu(y)
class ResNet2048_7_2_1(nn.Module):
    """ A residual network 7_2_1
    with 7 "layers", 2x2 average pooling, and 1 fully connected layer.
    """
    def __init__(self, block, blocks_per_layers, output_channels=4,
                 norm_layer=nn.BatchNorm2d, zero_init_residual=False):
        """ Input batch_sizex2048x2048x1 ->
        Conv1 batch_sizex512x512x8 ->
        "layer1" batch_sizex256x256x8 ->
        "layer2" batch_sizex128x128x16 ->
        "layer3" batch_sizex64x64x32 ->
        "layer4" batch_sizex32x32x64 ->
        "layer5" batch_sizex16x16x128 ->
        "layer6" batch_sizex8x8x192 ->
        "layer7" batch_sizex4x4x192 ->
        average pooling batch_sizex2x2x192 ->
        fc layer batch_sizexoutput_channelsx1
        """
        super(ResNet2048_7_2_1, self).__init__()
        self._norm_layer = norm_layer
        self.inplanes = 8
        self.dilation = 1
        # Aggressive stride-4 stem: 2048 -> 512 spatial.
        self.conv1 = nn.Conv2d(1, self.inplanes, kernel_size=7, stride=4,
                               padding=3, bias=False)
        self.bn1 = norm_layer(self.inplanes)
        self.relu = nn.ReLU(inplace=True)
        # Each layer halves the spatial size (stride=2 on its first block).
        self.layer1 = self._make_layer(block, 8, blocks_per_layers[0], stride=2)
        self.layer2 = self._make_layer(block, 16, blocks_per_layers[1], stride=2)
        self.layer3 = self._make_layer(block, 32, blocks_per_layers[2], stride=2)
        self.layer4 = self._make_layer(block, 64, blocks_per_layers[3], stride=2)
        self.layer5 = self._make_layer(block, 128, blocks_per_layers[4], stride=2)
        self.layer6 = self._make_layer(block, 192, blocks_per_layers[5], stride=2)
        self.layer7 = self._make_layer(block, 192, blocks_per_layers[6], stride=2)
        self.avgpool = nn.AvgPool2d((2, 2))
        # 192 channels * 2 * 2 spatial = 768 features into the classifier.
        self.fc1 = nn.Linear(768, output_channels)
        self.softmax = nn.Softmax(dim=1)
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode='fan_out',
                                        nonlinearity='relu')
            elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
        # Zero-initialize the last BN in each residual branch so every block
        # starts as an identity; improves by 0.2~0.3% per
        # https://arxiv.org/abs/1706.02677.
        # Fix: the original also tested `isinstance(m, Bottleneck)`, but no
        # Bottleneck class exists in this module, so enabling
        # zero_init_residual raised a NameError.
        if zero_init_residual:
            for m in self.modules():
                if isinstance(m, BasicBlock):
                    nn.init.constant_(m.bn2.weight, 0)

    def _make_layer(self, block, planes, num_of_blocks, stride=1):
        """Stack `num_of_blocks` residual blocks; only the first downsamples."""
        norm_layer = self._norm_layer
        downsample = None
        # Project the shortcut when spatial size or channel count changes.
        if stride != 1 or self.inplanes != planes:
            downsample = nn.Sequential(conv1x1(self.inplanes, planes, stride),
                                       norm_layer(planes))
        layers = [block(self.inplanes, planes, stride=stride,
                        downsample=downsample, norm_layer=norm_layer)]
        self.inplanes = planes
        for _ in range(1, num_of_blocks):
            layers.append(block(self.inplanes, planes, norm_layer=norm_layer))
        return nn.Sequential(*layers)

    def forward(self, x):
        """Return (softmax probabilities y, pooled features z, logits)."""
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)
        x = self.layer5(x)
        x = self.layer6(x)
        x = self.layer7(x)
        x = self.avgpool(x)
        z = torch.flatten(x, 1)
        y_logits = self.fc1(z)
        y = self.softmax(y_logits)
        return y, z, y_logits

    # based on
    # https://github.com/huggingface/transformers/blob/v1.0.0/pytorch_transformers/modeling_utils.py
    def save_pretrained(self, save_directory, epoch=-1):
        """ Save a model with its configuration file to a directory, so that it
        can be re-loaded using the `from_pretrained(save_directory)` class method.
        """
        # Saving path should be a directory where the model can be saved.
        if not os.path.exists(save_directory):
            os.makedirs(save_directory)
        assert os.path.isdir(save_directory)
        # Only save the model itself if wrapped (e.g. by DataParallel).
        model_to_save = self.module if hasattr(self, 'module') else self
        # The predefined name allows reloading via `from_pretrained`.
        if epoch == -1:
            output_model_file = os.path.join(save_directory, 'pytorch_model.bin')
        else:
            output_model_file = os.path.join(save_directory,
                                             'pytorch_model_epoch'+str(epoch)+'.bin')
        torch.save(model_to_save.state_dict(), output_model_file)

    # based on
    # https://github.com/huggingface/transformers/blob/v1.0.0/pytorch_transformers/modeling_utils.py
    @classmethod
    def from_pretrained(cls, pretrained_model_path, block, blocks_per_layers,
                        output_channels=4, *inputs, **kwargs):
        """Instantiate the model and load weights from a checkpoint.

        Fixes vs. the original: `output_channels` is forwarded to the
        constructor (it previously fell into the ignored *inputs, so the
        loaded model always had 4 outputs), and `logger` is defined before
        use (it previously raised NameError on missing/unexpected keys).
        """
        logger = logging.getLogger(__name__)
        state_dict = kwargs.pop('state_dict', None)
        output_loading_info = kwargs.pop('output_loading_info', False)
        # Instantiate the model.
        model = cls(block, blocks_per_layers, output_channels=output_channels,
                    **kwargs)
        # Fall back to loading the checkpoint saved by save_pretrained.
        if state_dict is None:
            state_dict = torch.load(pretrained_model_path, map_location='cpu')
        # Convert legacy parameter names (gamma/beta -> weight/bias).
        old_keys = []
        new_keys = []
        for key in state_dict.keys():
            new_key = None
            if 'gamma' in key:
                new_key = key.replace('gamma', 'weight')
            if 'beta' in key:
                new_key = key.replace('beta', 'bias')
            if new_key:
                old_keys.append(key)
                new_keys.append(new_key)
        for old_key, new_key in zip(old_keys, new_keys):
            state_dict[new_key] = state_dict.pop(old_key)
        # Load from a PyTorch state_dict, collecting any mismatches.
        missing_keys = []
        unexpected_keys = []
        error_msgs = []
        # Copy state_dict so _load_from_state_dict can modify it.
        metadata = getattr(state_dict, '_metadata', None)
        state_dict = state_dict.copy()
        if metadata is not None:
            state_dict._metadata = metadata

        def load(module, prefix=''):
            # Recursively load each submodule with its prefixed keys.
            local_metadata = {} if metadata is None else metadata.get(prefix[:-1], {})
            module._load_from_state_dict(
                state_dict, prefix, local_metadata, True,
                missing_keys, unexpected_keys, error_msgs)
            for name, child in module._modules.items():
                if child is not None:
                    load(child, prefix + name + '.')

        load(model)
        if len(missing_keys) > 0:
            logger.info("Weights of {} not initialized from pretrained model: {}".format(
                model.__class__.__name__, missing_keys))
        if len(unexpected_keys) > 0:
            logger.info("Weights from pretrained model not used in {}: {}".format(
                model.__class__.__name__, unexpected_keys))
        if len(error_msgs) > 0:
            raise RuntimeError('Error(s) in loading state_dict for {}:\n\t{}'.format(
                model.__class__.__name__, "\n\t".join(error_msgs)))
        if output_loading_info:
            loading_info = {"missing_keys": missing_keys,
                            "unexpected_keys": unexpected_keys,
                            "error_msgs": error_msgs}
            return model, loading_info
        return model
class ResNet1024_7_2_1(nn.Module):
    """ A residual network 7_2_1
    with 7 "layers", 2x2 average pooling, and 1 fully connected layer.
    """
    def __init__(self, block, blocks_per_layers, output_channels=4,
                 norm_layer=nn.BatchNorm2d, zero_init_residual=False):
        """ Input batch_sizex1024x1024x1 ->
        Conv1 batch_sizex512x512x8 ->
        "layer1" batch_sizex256x256x8 ->
        "layer2" batch_sizex128x128x16 ->
        "layer3" batch_sizex64x64x32 ->
        "layer4" batch_sizex32x32x64 ->
        "layer5" batch_sizex16x16x128 ->
        "layer6" batch_sizex8x8x192 ->
        "layer7" batch_sizex4x4x192 ->
        average pooling batch_sizex2x2x192 ->
        fc layer batch_sizexoutput_channelsx1
        """
        super(ResNet1024_7_2_1, self).__init__()
        self._norm_layer = norm_layer
        self.inplanes = 8
        self.dilation = 1
        # Stride-2 stem: 1024 -> 512 spatial.
        self.conv1 = nn.Conv2d(1, self.inplanes, kernel_size=5, stride=2,
                               padding=2, bias=False)
        self.bn1 = norm_layer(self.inplanes)
        self.relu = nn.ReLU(inplace=True)
        # Each layer halves the spatial size (stride=2 on its first block).
        self.layer1 = self._make_layer(block, 8, blocks_per_layers[0], stride=2)
        self.layer2 = self._make_layer(block, 16, blocks_per_layers[1], stride=2)
        self.layer3 = self._make_layer(block, 32, blocks_per_layers[2], stride=2)
        self.layer4 = self._make_layer(block, 64, blocks_per_layers[3], stride=2)
        self.layer5 = self._make_layer(block, 128, blocks_per_layers[4], stride=2)
        self.layer6 = self._make_layer(block, 192, blocks_per_layers[5], stride=2)
        self.layer7 = self._make_layer(block, 192, blocks_per_layers[6], stride=2)
        self.avgpool = nn.AvgPool2d((2, 2))
        # 192 channels * 2 * 2 spatial = 768 features into the classifier.
        self.fc1 = nn.Linear(768, output_channels)
        self.softmax = nn.Softmax(dim=1)
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode='fan_out',
                                        nonlinearity='relu')
            elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
        # Zero-initialize the last BN of each residual branch so every block
        # starts as an identity (https://arxiv.org/abs/1706.02677).
        # Fix: the original also tested `isinstance(m, Bottleneck)`, but no
        # Bottleneck class exists in this module (NameError when enabled).
        if zero_init_residual:
            for m in self.modules():
                if isinstance(m, BasicBlock):
                    nn.init.constant_(m.bn2.weight, 0)

    def _make_layer(self, block, planes, num_of_blocks, stride=1):
        """Stack `num_of_blocks` residual blocks; only the first downsamples."""
        norm_layer = self._norm_layer
        downsample = None
        # Project the shortcut when spatial size or channel count changes.
        if stride != 1 or self.inplanes != planes:
            downsample = nn.Sequential(conv1x1(self.inplanes, planes, stride),
                                       norm_layer(planes))
        layers = [block(self.inplanes, planes, stride=stride,
                        downsample=downsample, norm_layer=norm_layer)]
        self.inplanes = planes
        for _ in range(1, num_of_blocks):
            layers.append(block(self.inplanes, planes, norm_layer=norm_layer))
        return nn.Sequential(*layers)

    def forward(self, x):
        """Return (softmax probabilities y, pooled features z, logits)."""
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)
        x = self.layer5(x)
        x = self.layer6(x)
        x = self.layer7(x)
        x = self.avgpool(x)
        z = torch.flatten(x, 1)
        y_logits = self.fc1(z)
        y = self.softmax(y_logits)
        return y, z, y_logits

    # based on
    # https://github.com/huggingface/transformers/blob/v1.0.0/pytorch_transformers/modeling_utils.py
    def save_pretrained(self, save_directory, epoch=-1):
        """ Save a model with its configuration file to a directory, so that it
        can be re-loaded using the `from_pretrained(save_directory)` class method.
        """
        # Saving path should be a directory where the model can be saved.
        if not os.path.exists(save_directory):
            os.makedirs(save_directory)
        assert os.path.isdir(save_directory)
        # Only save the model itself if wrapped (e.g. by DataParallel).
        model_to_save = self.module if hasattr(self, 'module') else self
        # The predefined name allows reloading via `from_pretrained`.
        if epoch == -1:
            output_model_file = os.path.join(save_directory, 'pytorch_model.bin')
        else:
            output_model_file = os.path.join(save_directory,
                                             'pytorch_model_epoch'+str(epoch)+'.bin')
        torch.save(model_to_save.state_dict(), output_model_file)

    # based on
    # https://github.com/huggingface/transformers/blob/v1.0.0/pytorch_transformers/modeling_utils.py
    @classmethod
    def from_pretrained(cls, pretrained_model_path, block, blocks_per_layers,
                        output_channels=4, *inputs, **kwargs):
        """Instantiate the model and load weights from a checkpoint.

        Fixes vs. the original: `output_channels` is forwarded to the
        constructor (it previously fell into the ignored *inputs), and
        `logger` is defined before use (previously a NameError).
        """
        logger = logging.getLogger(__name__)
        state_dict = kwargs.pop('state_dict', None)
        output_loading_info = kwargs.pop('output_loading_info', False)
        # Instantiate the model.
        model = cls(block, blocks_per_layers, output_channels=output_channels,
                    **kwargs)
        # Fall back to loading the checkpoint saved by save_pretrained.
        if state_dict is None:
            state_dict = torch.load(pretrained_model_path, map_location='cpu')
        # Convert legacy parameter names (gamma/beta -> weight/bias).
        old_keys = []
        new_keys = []
        for key in state_dict.keys():
            new_key = None
            if 'gamma' in key:
                new_key = key.replace('gamma', 'weight')
            if 'beta' in key:
                new_key = key.replace('beta', 'bias')
            if new_key:
                old_keys.append(key)
                new_keys.append(new_key)
        for old_key, new_key in zip(old_keys, new_keys):
            state_dict[new_key] = state_dict.pop(old_key)
        # Load from a PyTorch state_dict, collecting any mismatches.
        missing_keys = []
        unexpected_keys = []
        error_msgs = []
        # Copy state_dict so _load_from_state_dict can modify it.
        metadata = getattr(state_dict, '_metadata', None)
        state_dict = state_dict.copy()
        if metadata is not None:
            state_dict._metadata = metadata

        def load(module, prefix=''):
            # Recursively load each submodule with its prefixed keys.
            local_metadata = {} if metadata is None else metadata.get(prefix[:-1], {})
            module._load_from_state_dict(
                state_dict, prefix, local_metadata, True,
                missing_keys, unexpected_keys, error_msgs)
            for name, child in module._modules.items():
                if child is not None:
                    load(child, prefix + name + '.')

        load(model)
        if len(missing_keys) > 0:
            logger.info("Weights of {} not initialized from pretrained model: {}".format(
                model.__class__.__name__, missing_keys))
        if len(unexpected_keys) > 0:
            logger.info("Weights from pretrained model not used in {}: {}".format(
                model.__class__.__name__, unexpected_keys))
        if len(error_msgs) > 0:
            raise RuntimeError('Error(s) in loading state_dict for {}:\n\t{}'.format(
                model.__class__.__name__, "\n\t".join(error_msgs)))
        if output_loading_info:
            loading_info = {"missing_keys": missing_keys,
                            "unexpected_keys": unexpected_keys,
                            "error_msgs": error_msgs}
            return model, loading_info
        return model
class ResNet512_6_2_1(nn.Module):
    """ A residual network 6_2_1
    with 6 "layers", 2x2 average pooling, and 1 fully connected layer.
    """
    def __init__(self, block, blocks_per_layers, output_channels=4,
                 norm_layer=nn.BatchNorm2d, zero_init_residual=False):
        """ Input batch_sizex512x512x1 ->
        Conv1 batch_sizex256x256x8 ->
        "layer1" batch_sizex128x128x16 ->
        "layer2" batch_sizex64x64x32 ->
        "layer3" batch_sizex32x32x64 ->
        "layer4" batch_sizex16x16x128 ->
        "layer5" batch_sizex8x8x192 ->
        "layer6" batch_sizex4x4x192 ->
        average pooling batch_sizex2x2x192 ->
        fc layer batch_sizexoutput_channelsx1
        """
        super(ResNet512_6_2_1, self).__init__()
        self._norm_layer = norm_layer
        self.inplanes = 8
        self.dilation = 1
        # Stride-2 stem: 512 -> 256 spatial.
        self.conv1 = nn.Conv2d(1, self.inplanes, kernel_size=3, stride=2,
                               padding=1, bias=False)
        self.bn1 = norm_layer(self.inplanes)
        self.relu = nn.ReLU(inplace=True)
        # Each layer halves the spatial size (stride=2 on its first block).
        self.layer1 = self._make_layer(block, 8, blocks_per_layers[0], stride=2)
        self.layer2 = self._make_layer(block, 16, blocks_per_layers[1], stride=2)
        self.layer3 = self._make_layer(block, 32, blocks_per_layers[2], stride=2)
        self.layer4 = self._make_layer(block, 64, blocks_per_layers[3], stride=2)
        self.layer5 = self._make_layer(block, 128, blocks_per_layers[4], stride=2)
        self.layer6 = self._make_layer(block, 192, blocks_per_layers[5], stride=2)
        self.avgpool = nn.AvgPool2d((2, 2))
        # 192 channels * 2 * 2 spatial = 768 features into the classifier.
        self.fc1 = nn.Linear(768, output_channels)
        self.softmax = nn.Softmax(dim=1)
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode='fan_out',
                                        nonlinearity='relu')
            elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
        # Zero-initialize the last BN of each residual branch so every block
        # starts as an identity (https://arxiv.org/abs/1706.02677).
        # Fix: the original also tested `isinstance(m, Bottleneck)`, but no
        # Bottleneck class exists in this module (NameError when enabled).
        if zero_init_residual:
            for m in self.modules():
                if isinstance(m, BasicBlock):
                    nn.init.constant_(m.bn2.weight, 0)

    def _make_layer(self, block, planes, num_of_blocks, stride=1):
        """Stack `num_of_blocks` residual blocks; only the first downsamples."""
        norm_layer = self._norm_layer
        downsample = None
        # Project the shortcut when spatial size or channel count changes.
        if stride != 1 or self.inplanes != planes:
            downsample = nn.Sequential(conv1x1(self.inplanes, planes, stride),
                                       norm_layer(planes))
        layers = [block(self.inplanes, planes, stride=stride,
                        downsample=downsample, norm_layer=norm_layer)]
        self.inplanes = planes
        for _ in range(1, num_of_blocks):
            layers.append(block(self.inplanes, planes, norm_layer=norm_layer))
        return nn.Sequential(*layers)

    def forward(self, x):
        """Return (softmax probabilities y, pooled features z, logits)."""
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)
        x = self.layer5(x)
        x = self.layer6(x)
        x = self.avgpool(x)
        z = torch.flatten(x, 1)
        y_logits = self.fc1(z)
        y = self.softmax(y_logits)
        return y, z, y_logits

    # based on
    # https://github.com/huggingface/transformers/blob/v1.0.0/pytorch_transformers/modeling_utils.py
    def save_pretrained(self, save_directory, epoch=-1):
        """ Save a model with its configuration file to a directory, so that it
        can be re-loaded using the `from_pretrained(save_directory)` class method.
        """
        # Saving path should be a directory where the model can be saved.
        if not os.path.exists(save_directory):
            os.makedirs(save_directory)
        assert os.path.isdir(save_directory)
        # Only save the model itself if wrapped (e.g. by DataParallel).
        model_to_save = self.module if hasattr(self, 'module') else self
        # The predefined name allows reloading via `from_pretrained`.
        if epoch == -1:
            output_model_file = os.path.join(save_directory, 'pytorch_model.bin')
        else:
            output_model_file = os.path.join(save_directory,
                                             'pytorch_model_epoch'+str(epoch)+'.bin')
        torch.save(model_to_save.state_dict(), output_model_file)

    # based on
    # https://github.com/huggingface/transformers/blob/v1.0.0/pytorch_transformers/modeling_utils.py
    @classmethod
    def from_pretrained(cls, pretrained_model_path, block, blocks_per_layers,
                        output_channels=4, *inputs, **kwargs):
        """Instantiate the model and load weights from a checkpoint.

        Fixes vs. the original: `output_channels` is forwarded to the
        constructor (it previously fell into the ignored *inputs), and
        `logger` is defined before use (previously a NameError).
        """
        logger = logging.getLogger(__name__)
        state_dict = kwargs.pop('state_dict', None)
        output_loading_info = kwargs.pop('output_loading_info', False)
        # Instantiate the model.
        model = cls(block, blocks_per_layers, output_channels=output_channels,
                    **kwargs)
        # Fall back to loading the checkpoint saved by save_pretrained.
        if state_dict is None:
            state_dict = torch.load(pretrained_model_path, map_location='cpu')
        # Convert legacy parameter names (gamma/beta -> weight/bias).
        old_keys = []
        new_keys = []
        for key in state_dict.keys():
            new_key = None
            if 'gamma' in key:
                new_key = key.replace('gamma', 'weight')
            if 'beta' in key:
                new_key = key.replace('beta', 'bias')
            if new_key:
                old_keys.append(key)
                new_keys.append(new_key)
        for old_key, new_key in zip(old_keys, new_keys):
            state_dict[new_key] = state_dict.pop(old_key)
        # Load from a PyTorch state_dict, collecting any mismatches.
        missing_keys = []
        unexpected_keys = []
        error_msgs = []
        # Copy state_dict so _load_from_state_dict can modify it.
        metadata = getattr(state_dict, '_metadata', None)
        state_dict = state_dict.copy()
        if metadata is not None:
            state_dict._metadata = metadata

        def load(module, prefix=''):
            # Recursively load each submodule with its prefixed keys.
            local_metadata = {} if metadata is None else metadata.get(prefix[:-1], {})
            module._load_from_state_dict(
                state_dict, prefix, local_metadata, True,
                missing_keys, unexpected_keys, error_msgs)
            for name, child in module._modules.items():
                if child is not None:
                    load(child, prefix + name + '.')

        load(model)
        if len(missing_keys) > 0:
            logger.info("Weights of {} not initialized from pretrained model: {}".format(
                model.__class__.__name__, missing_keys))
        if len(unexpected_keys) > 0:
            logger.info("Weights from pretrained model not used in {}: {}".format(
                model.__class__.__name__, unexpected_keys))
        if len(error_msgs) > 0:
            raise RuntimeError('Error(s) in loading state_dict for {}:\n\t{}'.format(
                model.__class__.__name__, "\n\t".join(error_msgs)))
        if output_loading_info:
            loading_info = {"missing_keys": missing_keys,
                            "unexpected_keys": unexpected_keys,
                            "error_msgs": error_msgs}
            return model, loading_info
        return model
class ResNet256_6_2_1(nn.Module):
    """ A residual network 6_2_1
    with 6 "layers", 2x2 average pooling, and 1 fully connected layer.
    """
    def __init__(self, block, blocks_per_layers, output_channels=4,
                 norm_layer=nn.BatchNorm2d, zero_init_residual=False):
        """ Input batch_sizex256x256x1 ->
        Conv1 batch_sizex256x256x8 ->
        "layer1" batch_sizex128x128x16 ->
        "layer2" batch_sizex64x64x32 ->
        "layer3" batch_sizex32x32x64 ->
        "layer4" batch_sizex16x16x128 ->
        "layer5" batch_sizex8x8x192 ->
        "layer6" batch_sizex4x4x192 ->
        average pooling batch_sizex2x2x192 ->
        fc layer batch_sizexoutput_channelsx1
        """
        super(ResNet256_6_2_1, self).__init__()
        self._norm_layer = norm_layer
        self.inplanes = 8
        self.dilation = 1
        # Stride-1 stem: spatial size is preserved (256 -> 256).
        self.conv1 = nn.Conv2d(1, self.inplanes, kernel_size=3, stride=1,
                               padding=1, bias=False)
        self.bn1 = norm_layer(self.inplanes)
        self.relu = nn.ReLU(inplace=True)
        # Each layer halves the spatial size (stride=2 on its first block).
        self.layer1 = self._make_layer(block, 8, blocks_per_layers[0], stride=2)
        self.layer2 = self._make_layer(block, 16, blocks_per_layers[1], stride=2)
        self.layer3 = self._make_layer(block, 32, blocks_per_layers[2], stride=2)
        self.layer4 = self._make_layer(block, 64, blocks_per_layers[3], stride=2)
        self.layer5 = self._make_layer(block, 128, blocks_per_layers[4], stride=2)
        self.layer6 = self._make_layer(block, 192, blocks_per_layers[5], stride=2)
        self.avgpool = nn.AvgPool2d((2, 2))
        # 192 channels * 2 * 2 spatial = 768 features into the classifier.
        self.fc1 = nn.Linear(768, output_channels)
        self.softmax = nn.Softmax(dim=1)
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode='fan_out',
                                        nonlinearity='relu')
            elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
        # Zero-initialize the last BN of each residual branch so every block
        # starts as an identity (https://arxiv.org/abs/1706.02677).
        # Fix: the original also tested `isinstance(m, Bottleneck)`, but no
        # Bottleneck class exists in this module (NameError when enabled).
        if zero_init_residual:
            for m in self.modules():
                if isinstance(m, BasicBlock):
                    nn.init.constant_(m.bn2.weight, 0)

    def _make_layer(self, block, planes, num_of_blocks, stride=1):
        """Stack `num_of_blocks` residual blocks; only the first downsamples."""
        norm_layer = self._norm_layer
        downsample = None
        # Project the shortcut when spatial size or channel count changes.
        if stride != 1 or self.inplanes != planes:
            downsample = nn.Sequential(conv1x1(self.inplanes, planes, stride),
                                       norm_layer(planes))
        layers = [block(self.inplanes, planes, stride=stride,
                        downsample=downsample, norm_layer=norm_layer)]
        self.inplanes = planes
        for _ in range(1, num_of_blocks):
            layers.append(block(self.inplanes, planes, norm_layer=norm_layer))
        return nn.Sequential(*layers)

    def forward(self, x):
        """Return (softmax probabilities y, pooled features z, logits)."""
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)
        x = self.layer5(x)
        x = self.layer6(x)
        x = self.avgpool(x)
        z = torch.flatten(x, 1)
        y_logits = self.fc1(z)
        y = self.softmax(y_logits)
        return y, z, y_logits

    # based on
    # https://github.com/huggingface/transformers/blob/v1.0.0/pytorch_transformers/modeling_utils.py
    def save_pretrained(self, save_directory, epoch=-1):
        """ Save a model with its configuration file to a directory, so that it
        can be re-loaded using the `from_pretrained(save_directory)` class method.
        """
        # Saving path should be a directory where the model can be saved.
        if not os.path.exists(save_directory):
            os.makedirs(save_directory)
        assert os.path.isdir(save_directory)
        # Only save the model itself if wrapped (e.g. by DataParallel).
        model_to_save = self.module if hasattr(self, 'module') else self
        # The predefined name allows reloading via `from_pretrained`.
        if epoch == -1:
            output_model_file = os.path.join(save_directory, 'pytorch_model.bin')
        else:
            output_model_file = os.path.join(save_directory,
                                             'pytorch_model_epoch'+str(epoch)+'.bin')
        torch.save(model_to_save.state_dict(), output_model_file)

    # based on
    # https://github.com/huggingface/transformers/blob/v1.0.0/pytorch_transformers/modeling_utils.py
    @classmethod
    def from_pretrained(cls, pretrained_model_path, block, blocks_per_layers,
                        output_channels=4, *inputs, **kwargs):
        """Instantiate the model and load weights from a checkpoint.

        Fixes vs. the original: `logger` is defined before use (previously a
        NameError on missing/unexpected keys); `output_channels` now has a
        default matching the constructor, a backward-compatible change.
        """
        logger = logging.getLogger(__name__)
        state_dict = kwargs.pop('state_dict', None)
        output_loading_info = kwargs.pop('output_loading_info', False)
        # Instantiate the model.
        model = cls(block, blocks_per_layers, output_channels=output_channels,
                    **kwargs)
        # Fall back to loading the checkpoint saved by save_pretrained.
        if state_dict is None:
            state_dict = torch.load(pretrained_model_path, map_location='cpu')
        # Convert legacy parameter names (gamma/beta -> weight/bias).
        old_keys = []
        new_keys = []
        for key in state_dict.keys():
            new_key = None
            if 'gamma' in key:
                new_key = key.replace('gamma', 'weight')
            if 'beta' in key:
                new_key = key.replace('beta', 'bias')
            if new_key:
                old_keys.append(key)
                new_keys.append(new_key)
        for old_key, new_key in zip(old_keys, new_keys):
            state_dict[new_key] = state_dict.pop(old_key)
        # Load from a PyTorch state_dict, collecting any mismatches.
        missing_keys = []
        unexpected_keys = []
        error_msgs = []
        # Copy state_dict so _load_from_state_dict can modify it.
        metadata = getattr(state_dict, '_metadata', None)
        state_dict = state_dict.copy()
        if metadata is not None:
            state_dict._metadata = metadata

        def load(module, prefix=''):
            # Recursively load each submodule with its prefixed keys.
            local_metadata = {} if metadata is None else metadata.get(prefix[:-1], {})
            module._load_from_state_dict(
                state_dict, prefix, local_metadata, True,
                missing_keys, unexpected_keys, error_msgs)
            for name, child in module._modules.items():
                if child is not None:
                    load(child, prefix + name + '.')

        load(model)
        if len(missing_keys) > 0:
            logger.info("Weights of {} not initialized from pretrained model: {}".format(
                model.__class__.__name__, missing_keys))
        if len(unexpected_keys) > 0:
            logger.info("Weights from pretrained model not used in {}: {}".format(
                model.__class__.__name__, unexpected_keys))
        if len(error_msgs) > 0:
            raise RuntimeError('Error(s) in loading state_dict for {}:\n\t{}'.format(
                model.__class__.__name__, "\n\t".join(error_msgs)))
        if output_loading_info:
            loading_info = {"missing_keys": missing_keys,
                            "unexpected_keys": unexpected_keys,
                            "error_msgs": error_msgs}
            return model, loading_info
        return model
def build_resnet2048_7_2_1(block=BasicBlock, blocks_per_layers=(2, 2, 2, 2, 2, 2, 2),
                           pretrained=False, pretrained_model_path=None,
                           output_channels=4, **kwargs):
    """Construct a ResNet2048_7_2_1, optionally loading pretrained weights.

    Fixes vs. the original: `output_channels` is passed as a keyword so it
    actually reaches the constructor during pretrained loading (it was
    swallowed by the classmethod's *inputs), the mutable list default is a
    tuple, and a throwaway model is no longer built when `pretrained` is set.
    """
    if pretrained:
        return ResNet2048_7_2_1.from_pretrained(
            pretrained_model_path, block, blocks_per_layers,
            output_channels=output_channels, **kwargs)
    return ResNet2048_7_2_1(block, blocks_per_layers,
                            output_channels=output_channels, **kwargs)
def build_resnet1024_7_2_1(block=BasicBlock, blocks_per_layers=(2, 2, 2, 2, 2, 2, 2),
                           pretrained=False, pretrained_model_path=None,
                           output_channels=4, **kwargs):
    """Construct a ResNet1024_7_2_1, optionally loading pretrained weights.

    Fixes vs. the original: `output_channels` is passed as a keyword so it
    actually reaches the constructor during pretrained loading (it was
    swallowed by the classmethod's *inputs), the mutable list default is a
    tuple, and a throwaway model is no longer built when `pretrained` is set.
    """
    if pretrained:
        return ResNet1024_7_2_1.from_pretrained(
            pretrained_model_path, block, blocks_per_layers,
            output_channels=output_channels, **kwargs)
    return ResNet1024_7_2_1(block, blocks_per_layers,
                            output_channels=output_channels, **kwargs)
def build_resnet512_6_2_1(block=BasicBlock, blocks_per_layers=(2, 2, 2, 2, 2, 2),
                          pretrained=False, pretrained_model_path=None,
                          output_channels=4, **kwargs):
    """Construct a ResNet512_6_2_1, optionally loading pretrained weights.

    Fixes vs. the original: `output_channels` is passed as a keyword so it
    actually reaches the constructor during pretrained loading (it was
    swallowed by the classmethod's *inputs), the mutable list default is a
    tuple, and a throwaway model is no longer built when `pretrained` is set.
    """
    if pretrained:
        return ResNet512_6_2_1.from_pretrained(
            pretrained_model_path, block, blocks_per_layers,
            output_channels=output_channels, **kwargs)
    return ResNet512_6_2_1(block, blocks_per_layers,
                           output_channels=output_channels, **kwargs)
def build_resnet256_6_2_1(block=BasicBlock, blocks_per_layers=(2, 2, 2, 2, 2, 2),
                          pretrained=False, pretrained_model_path=None,
                          output_channels=4, **kwargs):
    """Construct a ResNet256_6_2_1, optionally loading pretrained weights.

    Fixes vs. the original: `output_channels` is passed as a keyword so the
    call is unambiguous, the mutable list default is a tuple, and a
    throwaway model is no longer built when `pretrained` is set.
    """
    if pretrained:
        return ResNet256_6_2_1.from_pretrained(
            pretrained_model_path, block, blocks_per_layers,
            output_channels=output_channels, **kwargs)
    return ResNet256_6_2_1(block, blocks_per_layers,
                           output_channels=output_channels, **kwargs)
| 41.981199
| 119
| 0.602083
| 4,476
| 35,726
| 4.577301
| 0.067024
| 0.035143
| 0.036607
| 0.022843
| 0.946212
| 0.942503
| 0.94016
| 0.938745
| 0.938745
| 0.934059
| 0
| 0.035023
| 0.301489
| 35,726
| 851
| 120
| 41.981199
| 0.785975
| 0.194228
| 0
| 0.887299
| 0
| 0
| 0.045326
| 0
| 0
| 0
| 0
| 0
| 0.007156
| 1
| 0.057245
| false
| 0
| 0.019678
| 0
| 0.128801
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b189191e8ffed79e8d3e3507737c29eb67aedf28
| 81,525
|
py
|
Python
|
src/python/dxpy/api.py
|
scalavision/dx-toolkit
|
e4a2361b3bb6424d7e1ad2bcfc65d649b02d0496
|
[
"Apache-2.0"
] | null | null | null |
src/python/dxpy/api.py
|
scalavision/dx-toolkit
|
e4a2361b3bb6424d7e1ad2bcfc65d649b02d0496
|
[
"Apache-2.0"
] | null | null | null |
src/python/dxpy/api.py
|
scalavision/dx-toolkit
|
e4a2361b3bb6424d7e1ad2bcfc65d649b02d0496
|
[
"Apache-2.0"
] | null | null | null |
# Do not modify this file by hand.
#
# It is automatically generated by src/api_wrappers/generatePythonAPIWrappers.py.
# (Run make api_wrappers to update it.)
from __future__ import print_function, unicode_literals, division, absolute_import
import sys
from dxpy import DXHTTPRequest
from dxpy.utils import Nonce
def analysis_add_tags(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /analysis-xxxx/addTags API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fanalysis-xxxx%2FaddTags
    """
    # Fix: a mutable default ({}) is shared across calls; use a None sentinel.
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/addTags' % object_id, input_params, always_retry=always_retry, **kwargs)
def analysis_describe(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /analysis-xxxx/describe API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fanalysis-xxxx%2Fdescribe
    """
    # Fix: a mutable default ({}) is shared across calls; use a None sentinel.
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/describe' % object_id, input_params, always_retry=always_retry, **kwargs)
def analysis_remove_tags(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /analysis-xxxx/removeTags API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fanalysis-xxxx%2FremoveTags
    """
    # Fix: a mutable default ({}) is shared across calls; use a None sentinel.
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/removeTags' % object_id, input_params, always_retry=always_retry, **kwargs)
def analysis_set_properties(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /analysis-xxxx/setProperties API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fanalysis-xxxx%2FsetProperties
    """
    # Fix: a mutable default ({}) is shared across calls; use a None sentinel.
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/setProperties' % object_id, input_params, always_retry=always_retry, **kwargs)
def analysis_terminate(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /analysis-xxxx/terminate API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fanalysis-xxxx%2Fterminate
    """
    # Fix: a mutable default ({}) is shared across calls; use a None sentinel.
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/terminate' % object_id, input_params, always_retry=always_retry, **kwargs)
def app_add_authorized_users(app_name_or_id, alias=None, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /app-xxxx/addAuthorizedUsers API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/addAuthorizedUsers
    """
    # Fix: a mutable default ({}) is shared across calls; use a None sentinel.
    if input_params is None:
        input_params = {}
    # An app is addressed either by id ("app-xxxx") or by "name/alias".
    fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
    return DXHTTPRequest('/%s/addAuthorizedUsers' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def app_add_categories(app_name_or_id, alias=None, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /app-xxxx/addCategories API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/addCategories
    """
    # Fix: a mutable default ({}) is shared across calls; use a None sentinel.
    if input_params is None:
        input_params = {}
    # An app is addressed either by id ("app-xxxx") or by "name/alias".
    fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
    return DXHTTPRequest('/%s/addCategories' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def app_add_developers(app_name_or_id, alias=None, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /app-xxxx/addDevelopers API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/addDevelopers
    """
    # Fix: a mutable default ({}) is shared across calls; use a None sentinel.
    if input_params is None:
        input_params = {}
    # An app is addressed either by id ("app-xxxx") or by "name/alias".
    fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
    return DXHTTPRequest('/%s/addDevelopers' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def app_add_tags(app_name_or_id, alias=None, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /app-xxxx/addTags API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/addTags
    """
    # Fix: a mutable default ({}) is shared across calls; use a None sentinel.
    if input_params is None:
        input_params = {}
    # An app is addressed either by id ("app-xxxx") or by "name/alias".
    fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
    return DXHTTPRequest('/%s/addTags' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def app_delete(app_name_or_id, alias=None, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /app-xxxx/delete API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/delete
    """
    # Fix: a mutable default ({}) is shared across calls; use a None sentinel.
    if input_params is None:
        input_params = {}
    # An app is addressed either by id ("app-xxxx") or by "name/alias".
    fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
    return DXHTTPRequest('/%s/delete' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def app_describe(app_name_or_id, alias=None, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /app-xxxx/describe API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/describe
"""
fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
return DXHTTPRequest('/%s/describe' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def app_get(app_name_or_id, alias=None, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /app-xxxx/get API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/get
"""
fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
return DXHTTPRequest('/%s/get' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def app_install(app_name_or_id, alias=None, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /app-xxxx/install API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/install
"""
fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
return DXHTTPRequest('/%s/install' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def app_list_authorized_users(app_name_or_id, alias=None, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /app-xxxx/listAuthorizedUsers API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/listAuthorizedUsers
"""
fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
return DXHTTPRequest('/%s/listAuthorizedUsers' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def app_list_categories(app_name_or_id, alias=None, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /app-xxxx/listCategories API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/listCategories
"""
fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
return DXHTTPRequest('/%s/listCategories' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def app_list_developers(app_name_or_id, alias=None, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /app-xxxx/listDevelopers API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/listDevelopers
"""
fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
return DXHTTPRequest('/%s/listDevelopers' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def app_publish(app_name_or_id, alias=None, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /app-xxxx/publish API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/publish
"""
fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
return DXHTTPRequest('/%s/publish' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def app_remove_authorized_users(app_name_or_id, alias=None, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /app-xxxx/removeAuthorizedUsers API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/removeAuthorizedUsers
"""
fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
return DXHTTPRequest('/%s/removeAuthorizedUsers' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def app_remove_categories(app_name_or_id, alias=None, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /app-xxxx/removeCategories API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/removeCategories
"""
fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
return DXHTTPRequest('/%s/removeCategories' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def app_remove_developers(app_name_or_id, alias=None, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /app-xxxx/removeDevelopers API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/removeDevelopers
"""
fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
return DXHTTPRequest('/%s/removeDevelopers' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def app_remove_tags(app_name_or_id, alias=None, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /app-xxxx/removeTags API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/removeTags
"""
fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
return DXHTTPRequest('/%s/removeTags' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def app_run(app_name_or_id, alias=None, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /app-xxxx/run API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/run
"""
input_params_cp = Nonce.update_nonce(input_params)
fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
return DXHTTPRequest('/%s/run' % fully_qualified_version, input_params_cp, always_retry=always_retry, **kwargs)
def app_validate_batch(app_name_or_id, alias=None, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /app-xxxx/validateBatch API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/validateBatch
"""
fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
return DXHTTPRequest('/%s/validateBatch' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def app_uninstall(app_name_or_id, alias=None, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /app-xxxx/uninstall API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/uninstall
"""
fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
return DXHTTPRequest('/%s/uninstall' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def app_update(app_name_or_id, alias=None, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /app-xxxx/update API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/update
"""
fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
return DXHTTPRequest('/%s/update' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def app_new(input_params={}, always_retry=True, **kwargs):
"""
Invokes the /app/new API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app/new
"""
input_params_cp = Nonce.update_nonce(input_params)
return DXHTTPRequest('/app/new', input_params_cp, always_retry=always_retry, **kwargs)
# --- Applet API method wrappers ----------------------------------------------
# Thin wrappers around DXHTTPRequest for the /applet-xxxx routes.
# Common parameters:
#   object_id (str): applet ID (e.g. "applet-xxxx").
#   input_params (dict or None): JSON-serializable request body; treated as {} when None.
#   always_retry (bool): whether the request is safe to retry on failure.
# Each wrapper returns the parsed response from the API server.
# NOTE: the default for input_params was changed from the mutable `{}` to None
# to avoid sharing one dict object across all calls (Python mutable-default pitfall).
def applet_add_tags(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /applet-xxxx/addTags API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Tags#API-method%3A-%2Fclass-xxxx%2FaddTags
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/addTags' % object_id, input_params, always_retry=always_retry, **kwargs)
def applet_describe(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /applet-xxxx/describe API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Applets-and-Entry-Points#API-method%3A-%2Fapplet-xxxx%2Fdescribe
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/describe' % object_id, input_params, always_retry=always_retry, **kwargs)
def applet_get(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /applet-xxxx/get API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Applets-and-Entry-Points#API-method%3A-%2Fapplet-xxxx%2Fget
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/get' % object_id, input_params, always_retry=always_retry, **kwargs)
def applet_get_details(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /applet-xxxx/getDetails API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Details-and-Links#API-method%3A-%2Fclass-xxxx%2FgetDetails
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/getDetails' % object_id, input_params, always_retry=always_retry, **kwargs)
def applet_list_projects(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /applet-xxxx/listProjects API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Cloning#API-method%3A-%2Fclass-xxxx%2FlistProjects
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/listProjects' % object_id, input_params, always_retry=always_retry, **kwargs)
def applet_remove_tags(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /applet-xxxx/removeTags API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Tags#API-method%3A-%2Fclass-xxxx%2FremoveTags
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/removeTags' % object_id, input_params, always_retry=always_retry, **kwargs)
def applet_rename(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /applet-xxxx/rename API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Name#API-method%3A-%2Fclass-xxxx%2Frename
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/rename' % object_id, input_params, always_retry=always_retry, **kwargs)
def applet_validate_batch(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /applet-xxxx/validateBatch API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Applets-and-Entry-Points#API-method%3A-%2Fapplet-xxxx%2FvalidateBatch
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/validateBatch' % object_id, input_params, always_retry=always_retry, **kwargs)
def applet_run(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /applet-xxxx/run API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Applets-and-Entry-Points#API-method%3A-%2Fapplet-xxxx%2Frun
    """
    input_params = {} if input_params is None else input_params
    # Add an idempotency nonce so retried /run requests don't launch twice.
    input_params_cp = Nonce.update_nonce(input_params)
    return DXHTTPRequest('/%s/run' % object_id, input_params_cp, always_retry=always_retry, **kwargs)
def applet_set_properties(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /applet-xxxx/setProperties API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Properties#API-method%3A-%2Fclass-xxxx%2FsetProperties
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/setProperties' % object_id, input_params, always_retry=always_retry, **kwargs)
def applet_new(input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /applet/new API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Applets-and-Entry-Points#API-method%3A-%2Fapplet%2Fnew
    """
    input_params = {} if input_params is None else input_params
    # Add an idempotency nonce so retried /new requests don't create twice.
    input_params_cp = Nonce.update_nonce(input_params)
    return DXHTTPRequest('/applet/new', input_params_cp, always_retry=always_retry, **kwargs)
# --- Container API method wrappers -------------------------------------------
# Thin wrappers around DXHTTPRequest for the /container-xxxx routes.
# Common parameters:
#   object_id (str): container ID (e.g. "container-xxxx").
#   input_params (dict or None): JSON-serializable request body; treated as {} when None.
#   always_retry (bool): whether the request is safe to retry; mutating routes
#     (clone/move/removeFolder/removeObjects/renameFolder) default to False
#     because they are not idempotent.
# Each wrapper returns the parsed response from the API server.
# NOTE: the default for input_params was changed from the mutable `{}` to None
# to avoid sharing one dict object across all calls (Python mutable-default pitfall).
def container_clone(object_id, input_params=None, always_retry=False, **kwargs):
    """
    Invokes the /container-xxxx/clone API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Cloning#API-method%3A-%2Fclass-xxxx%2Fclone
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/clone' % object_id, input_params, always_retry=always_retry, **kwargs)
def container_describe(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /container-xxxx/describe API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Containers-for-Execution#API-method%3A-%2Fcontainer-xxxx%2Fdescribe
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/describe' % object_id, input_params, always_retry=always_retry, **kwargs)
def container_destroy(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /container-xxxx/destroy API method.
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/destroy' % object_id, input_params, always_retry=always_retry, **kwargs)
def container_list_folder(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /container-xxxx/listFolder API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Folders-and-Deletion#API-method%3A-%2Fclass-xxxx%2FlistFolder
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/listFolder' % object_id, input_params, always_retry=always_retry, **kwargs)
def container_move(object_id, input_params=None, always_retry=False, **kwargs):
    """
    Invokes the /container-xxxx/move API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Folders-and-Deletion#API-method%3A-%2Fclass-xxxx%2Fmove
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/move' % object_id, input_params, always_retry=always_retry, **kwargs)
def container_new_folder(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /container-xxxx/newFolder API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Folders-and-Deletion#API-method%3A-%2Fclass-xxxx%2FnewFolder
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/newFolder' % object_id, input_params, always_retry=always_retry, **kwargs)
def container_remove_folder(object_id, input_params=None, always_retry=False, **kwargs):
    """
    Invokes the /container-xxxx/removeFolder API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Folders-and-Deletion#API-method%3A-%2Fclass-xxxx%2FremoveFolder
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/removeFolder' % object_id, input_params, always_retry=always_retry, **kwargs)
def container_remove_objects(object_id, input_params=None, always_retry=False, **kwargs):
    """
    Invokes the /container-xxxx/removeObjects API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Folders-and-Deletion#API-method%3A-%2Fclass-xxxx%2FremoveObjects
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/removeObjects' % object_id, input_params, always_retry=always_retry, **kwargs)
def container_rename_folder(object_id, input_params=None, always_retry=False, **kwargs):
    """
    Invokes the /container-xxxx/renameFolder API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Folders-and-Deletion#API-method%3A-%2Fclass-xxxx%2FrenameFolder
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/renameFolder' % object_id, input_params, always_retry=always_retry, **kwargs)
# --- Database API method wrappers --------------------------------------------
# Thin wrappers around DXHTTPRequest for the /database-xxxx routes.
# Common parameters:
#   object_id (str): database ID (e.g. "database-xxxx").
#   input_params (dict or None): JSON-serializable request body; treated as {} when None.
#   always_retry (bool): whether the request is safe to retry on failure.
# Each wrapper returns the parsed response from the API server.
# NOTE: the default for input_params was changed from the mutable `{}` to None
# to avoid sharing one dict object across all calls (Python mutable-default pitfall).
def database_add_tags(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /database-xxxx/addTags API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Tags#API-method%3A-%2Fclass-xxxx%2FaddTags
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/addTags' % object_id, input_params, always_retry=always_retry, **kwargs)
def database_add_types(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /database-xxxx/addTypes API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Types#API-method%3A-%2Fclass-xxxx%2FaddTypes
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/addTypes' % object_id, input_params, always_retry=always_retry, **kwargs)
def database_describe(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /database-xxxx/describe API method.
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/describe' % object_id, input_params, always_retry=always_retry, **kwargs)
def database_get_details(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /database-xxxx/getDetails API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Details-and-Links#API-method%3A-%2Fclass-xxxx%2FgetDetails
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/getDetails' % object_id, input_params, always_retry=always_retry, **kwargs)
def database_list_projects(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /database-xxxx/listProjects API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Cloning#API-method%3A-%2Fclass-xxxx%2FlistProjects
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/listProjects' % object_id, input_params, always_retry=always_retry, **kwargs)
def database_relocate(object_id, input_params=None, always_retry=False, **kwargs):
    """
    Invokes the /database-xxxx/relocate API method.
    """
    # always_retry=False: relocation is not idempotent, so it must not be retried blindly.
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/relocate' % object_id, input_params, always_retry=always_retry, **kwargs)
def database_remove_tags(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /database-xxxx/removeTags API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Tags#API-method%3A-%2Fclass-xxxx%2FremoveTags
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/removeTags' % object_id, input_params, always_retry=always_retry, **kwargs)
def database_remove_types(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /database-xxxx/removeTypes API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Types#API-method%3A-%2Fclass-xxxx%2FremoveTypes
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/removeTypes' % object_id, input_params, always_retry=always_retry, **kwargs)
def database_rename(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /database-xxxx/rename API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Name#API-method%3A-%2Fclass-xxxx%2Frename
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/rename' % object_id, input_params, always_retry=always_retry, **kwargs)
def database_set_details(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /database-xxxx/setDetails API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Details-and-Links#API-method%3A-%2Fclass-xxxx%2FsetDetails
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/setDetails' % object_id, input_params, always_retry=always_retry, **kwargs)
def database_set_properties(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /database-xxxx/setProperties API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Properties#API-method%3A-%2Fclass-xxxx%2FsetProperties
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/setProperties' % object_id, input_params, always_retry=always_retry, **kwargs)
def database_set_visibility(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /database-xxxx/setVisibility API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Visibility#API-method%3A-%2Fclass-xxxx%2FsetVisibility
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/setVisibility' % object_id, input_params, always_retry=always_retry, **kwargs)
# --- File API method wrappers ------------------------------------------------
# Thin wrappers around DXHTTPRequest for the /file-xxxx routes.
# Common parameters:
#   object_id (str): file ID (e.g. "file-xxxx").
#   input_params (dict or None): JSON-serializable request body; treated as {} when None.
#   always_retry (bool): whether the request is safe to retry on failure.
# Each wrapper returns the parsed response from the API server.
# NOTE: the default for input_params was changed from the mutable `{}` to None
# to avoid sharing one dict object across all calls (Python mutable-default pitfall).
def file_add_tags(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /file-xxxx/addTags API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Tags#API-method%3A-%2Fclass-xxxx%2FaddTags
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/addTags' % object_id, input_params, always_retry=always_retry, **kwargs)
def file_add_types(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /file-xxxx/addTypes API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Types#API-method%3A-%2Fclass-xxxx%2FaddTypes
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/addTypes' % object_id, input_params, always_retry=always_retry, **kwargs)
def file_close(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /file-xxxx/close API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Files#API-method%3A-%2Ffile-xxxx%2Fclose
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/close' % object_id, input_params, always_retry=always_retry, **kwargs)
def file_describe(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /file-xxxx/describe API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Files#API-method%3A-%2Ffile-xxxx%2Fdescribe
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/describe' % object_id, input_params, always_retry=always_retry, **kwargs)
def file_download(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /file-xxxx/download API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Files#API-method%3A-%2Ffile-xxxx%2Fdownload
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/download' % object_id, input_params, always_retry=always_retry, **kwargs)
def file_get_details(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /file-xxxx/getDetails API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Details-and-Links#API-method%3A-%2Fclass-xxxx%2FgetDetails
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/getDetails' % object_id, input_params, always_retry=always_retry, **kwargs)
def file_list_projects(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /file-xxxx/listProjects API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Cloning#API-method%3A-%2Fclass-xxxx%2FlistProjects
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/listProjects' % object_id, input_params, always_retry=always_retry, **kwargs)
def file_remove_tags(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /file-xxxx/removeTags API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Tags#API-method%3A-%2Fclass-xxxx%2FremoveTags
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/removeTags' % object_id, input_params, always_retry=always_retry, **kwargs)
def file_remove_types(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /file-xxxx/removeTypes API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Types#API-method%3A-%2Fclass-xxxx%2FremoveTypes
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/removeTypes' % object_id, input_params, always_retry=always_retry, **kwargs)
def file_rename(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /file-xxxx/rename API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Name#API-method%3A-%2Fclass-xxxx%2Frename
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/rename' % object_id, input_params, always_retry=always_retry, **kwargs)
def file_set_details(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /file-xxxx/setDetails API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Details-and-Links#API-method%3A-%2Fclass-xxxx%2FsetDetails
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/setDetails' % object_id, input_params, always_retry=always_retry, **kwargs)
def file_set_properties(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /file-xxxx/setProperties API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Properties#API-method%3A-%2Fclass-xxxx%2FsetProperties
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/setProperties' % object_id, input_params, always_retry=always_retry, **kwargs)
def file_set_visibility(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /file-xxxx/setVisibility API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Visibility#API-method%3A-%2Fclass-xxxx%2FsetVisibility
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/setVisibility' % object_id, input_params, always_retry=always_retry, **kwargs)
def file_upload(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /file-xxxx/upload API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Files#API-method%3A-%2Ffile-xxxx%2Fupload
    """
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/upload' % object_id, input_params, always_retry=always_retry, **kwargs)
def file_new(input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /file/new API method.
    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Files#API-method%3A-%2Ffile%2Fnew
    """
    input_params = {} if input_params is None else input_params
    # Add an idempotency nonce so retried /new requests don't create twice.
    input_params_cp = Nonce.update_nonce(input_params)
    return DXHTTPRequest('/file/new', input_params_cp, always_retry=always_retry, **kwargs)
def global_workflow_add_authorized_users(name_or_id, alias=None, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /globalworkflow-xxxx/addAuthorizedUsers API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Global-Workflows#API-method:-/globalworkflow-xxxx%5B/yyyy%5D/addAuthorizedUsers
"""
fully_qualified_version = name_or_id + (('/' + alias) if alias else '')
return DXHTTPRequest('/%s/addAuthorizedUsers' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def global_workflow_add_categories(name_or_id, alias=None, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /globalworkflow-xxxx/addCategories API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Global-Workflows#API-method:-/globalworkflow-xxxx%5B/yyyy%5D/addCategories
"""
fully_qualified_version = name_or_id + (('/' + alias) if alias else '')
return DXHTTPRequest('/%s/addCategories' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def global_workflow_add_developers(name_or_id, alias=None, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /globalworkflow-xxxx/addDevelopers API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Global-Workflows#API-method:-/globalworkflow-xxxx%5B/yyyy%5D/addDevelopers
"""
fully_qualified_version = name_or_id + (('/' + alias) if alias else '')
return DXHTTPRequest('/%s/addDevelopers' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def global_workflow_add_tags(name_or_id, alias=None, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /globalworkflow-xxxx/addTags API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Global-Workflows#API-method:-/globalworkflow-xxxx%5B/yyyy%5D/addTags
"""
fully_qualified_version = name_or_id + (('/' + alias) if alias else '')
return DXHTTPRequest('/%s/addTags' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def global_workflow_delete(name_or_id, alias=None, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /globalworkflow-xxxx/delete API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Global-Workflows#API-method:-/globalworkflow-xxxx%5B/yyyy%5D/delete
"""
fully_qualified_version = name_or_id + (('/' + alias) if alias else '')
return DXHTTPRequest('/%s/delete' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def global_workflow_describe(name_or_id, alias=None, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /globalworkflow-xxxx/describe API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Global-Workflows#API-method:-/globalworkflow-xxxx%5B/yyyy%5D/describe
"""
fully_qualified_version = name_or_id + (('/' + alias) if alias else '')
return DXHTTPRequest('/%s/describe' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def global_workflow_list_authorized_users(name_or_id, alias=None, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /globalworkflow-xxxx/listAuthorizedUsers API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Global-Workflows#API-method:-/globalworkflow-xxxx%5B/yyyy%5D/listAuthorizedUsers
"""
fully_qualified_version = name_or_id + (('/' + alias) if alias else '')
return DXHTTPRequest('/%s/listAuthorizedUsers' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def global_workflow_list_categories(name_or_id, alias=None, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /globalworkflow-xxxx/listCategories API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Global-Workflows#API-method:-/globalworkflow-xxxx%5B/yyyy%5D/listCategories
"""
fully_qualified_version = name_or_id + (('/' + alias) if alias else '')
return DXHTTPRequest('/%s/listCategories' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def global_workflow_list_developers(name_or_id, alias=None, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /globalworkflow-xxxx/listDevelopers API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Global-Workflows#API-method:-/globalworkflow-xxxx%5B/yyyy%5D/listDevelopers
"""
fully_qualified_version = name_or_id + (('/' + alias) if alias else '')
return DXHTTPRequest('/%s/listDevelopers' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def global_workflow_publish(name_or_id, alias=None, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /globalworkflow-xxxx/publish API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Global-Workflows#API-method:-/globalworkflow-xxxx%5B/yyyy%5D/publish
"""
fully_qualified_version = name_or_id + (('/' + alias) if alias else '')
return DXHTTPRequest('/%s/publish' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def global_workflow_remove_authorized_users(name_or_id, alias=None, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /globalworkflow-xxxx/removeAuthorizedUsers API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Global-Workflows#API-method:-/globalworkflow-xxxx%5B/yyyy%5D/removeAuthorizedUsers
"""
fully_qualified_version = name_or_id + (('/' + alias) if alias else '')
return DXHTTPRequest('/%s/removeAuthorizedUsers' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def global_workflow_remove_categories(name_or_id, alias=None, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /globalworkflow-xxxx/removeCategories API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Global-Workflows#API-method:-/globalworkflow-xxxx%5B/yyyy%5D/removeCategories
"""
fully_qualified_version = name_or_id + (('/' + alias) if alias else '')
return DXHTTPRequest('/%s/removeCategories' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def global_workflow_remove_developers(name_or_id, alias=None, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /globalworkflow-xxxx/removeDevelopers API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Global-Workflows#API-method:-/globalworkflow-xxxx%5B/yyyy%5D/removeDevelopers
"""
fully_qualified_version = name_or_id + (('/' + alias) if alias else '')
return DXHTTPRequest('/%s/removeDevelopers' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def global_workflow_remove_tags(name_or_id, alias=None, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /globalworkflow-xxxx/removeTags API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Global-Workflows#API-method:-/globalworkflow-xxxx%5B/yyyy%5D/removeTags
"""
fully_qualified_version = name_or_id + (('/' + alias) if alias else '')
return DXHTTPRequest('/%s/removeTags' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def global_workflow_run(name_or_id, alias=None, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /globalworkflow-xxxx/run API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Global-Workflows#API-method:-/globalworkflow-xxxx%5B/yyyy%5D/run
"""
input_params_cp = Nonce.update_nonce(input_params)
fully_qualified_version = name_or_id + (('/' + alias) if alias else '')
return DXHTTPRequest('/%s/run' % fully_qualified_version, input_params_cp, always_retry=always_retry, **kwargs)
def global_workflow_update(name_or_id, alias=None, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /globalworkflow-xxxx/update API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Global-Workflows#API-method:-/globalworkflow-xxxx%5B/yyyy%5D/update
"""
fully_qualified_version = name_or_id + (('/' + alias) if alias else '')
return DXHTTPRequest('/%s/update' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)
def global_workflow_new(input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /globalworkflow/new API method.

    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Global-Workflows#API-method:-/globalworkflow/new
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    # Add a nonce to the payload (see Nonce.update_nonce) so retries are idempotent.
    input_params_cp = Nonce.update_nonce(input_params)
    return DXHTTPRequest('/globalworkflow/new', input_params_cp, always_retry=always_retry, **kwargs)
def gtable_add_rows(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /gtable-xxxx/addRows API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/GenomicTables#API-method%3A-%2Fgtable-xxxx%2FaddRows
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/addRows' % object_id, input_params, always_retry=always_retry, **kwargs)
def gtable_add_tags(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /gtable-xxxx/addTags API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Tags#API-method%3A-%2Fclass-xxxx%2FaddTags
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/addTags' % object_id, input_params, always_retry=always_retry, **kwargs)
def gtable_add_types(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /gtable-xxxx/addTypes API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Types#API-method%3A-%2Fclass-xxxx%2FaddTypes
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/addTypes' % object_id, input_params, always_retry=always_retry, **kwargs)
def gtable_close(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /gtable-xxxx/close API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/GenomicTables#API-method%3A-%2Fgtable-xxxx%2Fclose
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/close' % object_id, input_params, always_retry=always_retry, **kwargs)
def gtable_describe(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /gtable-xxxx/describe API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/GenomicTables#API-method%3A-%2Fgtable-xxxx%2Fdescribe
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/describe' % object_id, input_params, always_retry=always_retry, **kwargs)
def gtable_get(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /gtable-xxxx/get API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/GenomicTables#API-method%3A-%2Fgtable-xxxx%2Fget
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/get' % object_id, input_params, always_retry=always_retry, **kwargs)
def gtable_get_details(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /gtable-xxxx/getDetails API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Details-and-Links#API-method%3A-%2Fclass-xxxx%2FgetDetails
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/getDetails' % object_id, input_params, always_retry=always_retry, **kwargs)
def gtable_list_projects(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /gtable-xxxx/listProjects API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Cloning#API-method%3A-%2Fclass-xxxx%2FlistProjects
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/listProjects' % object_id, input_params, always_retry=always_retry, **kwargs)
def gtable_next_part(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /gtable-xxxx/nextPart API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/GenomicTables#API-method%3A-%2Fgtable-xxxx%2FnextPart
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/nextPart' % object_id, input_params, always_retry=always_retry, **kwargs)
def gtable_remove_tags(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /gtable-xxxx/removeTags API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Tags#API-method%3A-%2Fclass-xxxx%2FremoveTags
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/removeTags' % object_id, input_params, always_retry=always_retry, **kwargs)
def gtable_remove_types(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /gtable-xxxx/removeTypes API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Types#API-method%3A-%2Fclass-xxxx%2FremoveTypes
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/removeTypes' % object_id, input_params, always_retry=always_retry, **kwargs)
def gtable_rename(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /gtable-xxxx/rename API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Name#API-method%3A-%2Fclass-xxxx%2Frename
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/rename' % object_id, input_params, always_retry=always_retry, **kwargs)
def gtable_set_details(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /gtable-xxxx/setDetails API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Details-and-Links#API-method%3A-%2Fclass-xxxx%2FsetDetails
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/setDetails' % object_id, input_params, always_retry=always_retry, **kwargs)
def gtable_set_properties(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /gtable-xxxx/setProperties API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Properties#API-method%3A-%2Fclass-xxxx%2FsetProperties
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/setProperties' % object_id, input_params, always_retry=always_retry, **kwargs)
def gtable_set_visibility(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /gtable-xxxx/setVisibility API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Visibility#API-method%3A-%2Fclass-xxxx%2FsetVisibility
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/setVisibility' % object_id, input_params, always_retry=always_retry, **kwargs)
def gtable_new(input_params=None, always_retry=False, **kwargs):
    """
    Invokes the /gtable/new API method.

    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry (False: creation is not idempotent)

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/GenomicTables#API-method%3A-%2Fgtable%2Fnew
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/gtable/new', input_params, always_retry=always_retry, **kwargs)
def job_add_tags(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /job-xxxx/addTags API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Applets-and-Entry-Points#API-method%3A-%2Fjob-xxxx%2FaddTags
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/addTags' % object_id, input_params, always_retry=always_retry, **kwargs)
def job_describe(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /job-xxxx/describe API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Applets-and-Entry-Points#API-method%3A-%2Fjob-xxxx%2Fdescribe
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/describe' % object_id, input_params, always_retry=always_retry, **kwargs)
def job_get_log(object_id, input_params=None, always_retry=False, **kwargs):
    """
    Invokes the /job-xxxx/getLog API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry (False by default here)

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Applets-and-Entry-Points#API-method%3A-%2Fjob-xxxx%2FgetLog
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/getLog' % object_id, input_params, always_retry=always_retry, **kwargs)
def job_remove_tags(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /job-xxxx/removeTags API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Applets-and-Entry-Points#API-method%3A-%2Fjob-xxxx%2FremoveTags
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/removeTags' % object_id, input_params, always_retry=always_retry, **kwargs)
def job_set_properties(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /job-xxxx/setProperties API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Applets-and-Entry-Points#API-method%3A-%2Fjob-xxxx%2FsetProperties
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/setProperties' % object_id, input_params, always_retry=always_retry, **kwargs)
def job_terminate(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /job-xxxx/terminate API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Applets-and-Entry-Points#API-method%3A-%2Fjob-xxxx%2Fterminate
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/terminate' % object_id, input_params, always_retry=always_retry, **kwargs)
def job_new(input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /job/new API method.

    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Applets-and-Entry-Points#API-method%3A-%2Fjob%2Fnew
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    # Add a nonce to the payload (see Nonce.update_nonce) so retries are idempotent.
    input_params_cp = Nonce.update_nonce(input_params)
    return DXHTTPRequest('/job/new', input_params_cp, always_retry=always_retry, **kwargs)
def notifications_get(input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /notifications/get API method.

    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/notifications/get', input_params, always_retry=always_retry, **kwargs)
def notifications_mark_read(input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /notifications/markRead API method.

    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/notifications/markRead', input_params, always_retry=always_retry, **kwargs)
def org_describe(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /org-xxxx/describe API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Organizations#API-method%3A-%2Forg-xxxx%2Fdescribe
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/describe' % object_id, input_params, always_retry=always_retry, **kwargs)
def org_find_members(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /org-xxxx/findMembers API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Organizations#API-method%3A-%2Forg-xxxx%2FfindMembers
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/findMembers' % object_id, input_params, always_retry=always_retry, **kwargs)
def org_find_projects(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /org-xxxx/findProjects API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Organizations#API-method%3A-%2Forg-xxxx%2FfindProjects
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/findProjects' % object_id, input_params, always_retry=always_retry, **kwargs)
def org_find_apps(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /org-xxxx/findApps API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Organizations#API-method%3A-%2Forg-xxxx%2FfindApps
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/findApps' % object_id, input_params, always_retry=always_retry, **kwargs)
def org_invite(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /org-xxxx/invite API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Organizations#API-method%3A-%2Forg-xxxx%2Finvite
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/invite' % object_id, input_params, always_retry=always_retry, **kwargs)
def org_remove_member(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /org-xxxx/removeMember API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Organizations#API-method%3A-%2Forg-xxxx%2FremoveMember
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/removeMember' % object_id, input_params, always_retry=always_retry, **kwargs)
def org_set_member_access(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /org-xxxx/setMemberAccess API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Organizations#API-method%3A-%2Forg-xxxx%2FsetMemberAccess
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/setMemberAccess' % object_id, input_params, always_retry=always_retry, **kwargs)
def org_update(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /org-xxxx/update API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Organizations#API-method%3A-%2Forg-xxxx%2Fupdate
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/update' % object_id, input_params, always_retry=always_retry, **kwargs)
def org_new(input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /org/new API method.

    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Organizations#API-method%3A-%2Forg%2Fnew
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    # Add a nonce to the payload (see Nonce.update_nonce) so retries are idempotent.
    input_params_cp = Nonce.update_nonce(input_params)
    return DXHTTPRequest('/org/new', input_params_cp, always_retry=always_retry, **kwargs)
def project_add_tags(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /project-xxxx/addTags API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Projects#API-method%3A-%2Fproject-xxxx%2FaddTags
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/addTags' % object_id, input_params, always_retry=always_retry, **kwargs)
def project_clone(object_id, input_params=None, always_retry=False, **kwargs):
    """
    Invokes the /project-xxxx/clone API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry (False by default here)

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Cloning#API-method%3A-%2Fclass-xxxx%2Fclone
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/clone' % object_id, input_params, always_retry=always_retry, **kwargs)
def project_decrease_permissions(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /project-xxxx/decreasePermissions API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Project-Permissions-and-Sharing#API-method%3A-%2Fproject-xxxx%2FdecreasePermissions
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/decreasePermissions' % object_id, input_params, always_retry=always_retry, **kwargs)
def project_describe(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /project-xxxx/describe API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Projects#API-method%3A-%2Fproject-xxxx%2Fdescribe
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/describe' % object_id, input_params, always_retry=always_retry, **kwargs)
def project_destroy(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /project-xxxx/destroy API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Projects#API-method%3A-%2Fproject-xxxx%2Fdestroy
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/destroy' % object_id, input_params, always_retry=always_retry, **kwargs)
def project_invite(object_id, input_params=None, always_retry=False, **kwargs):
    """
    Invokes the /project-xxxx/invite API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry (False by default here)

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Project-Permissions-and-Sharing#API-method%3A-%2Fproject-xxxx%2Finvite
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/invite' % object_id, input_params, always_retry=always_retry, **kwargs)
def project_leave(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /project-xxxx/leave API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Project-Permissions-and-Sharing#API-method%3A-%2Fproject-xxxx%2Fleave
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/leave' % object_id, input_params, always_retry=always_retry, **kwargs)
def project_list_folder(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /project-xxxx/listFolder API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Folders-and-Deletion#API-method%3A-%2Fclass-xxxx%2FlistFolder
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/listFolder' % object_id, input_params, always_retry=always_retry, **kwargs)
def project_move(object_id, input_params=None, always_retry=False, **kwargs):
    """
    Invokes the /project-xxxx/move API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry (False by default here)

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Folders-and-Deletion#API-method%3A-%2Fclass-xxxx%2Fmove
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/move' % object_id, input_params, always_retry=always_retry, **kwargs)
def project_new_folder(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /project-xxxx/newFolder API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Folders-and-Deletion#API-method%3A-%2Fclass-xxxx%2FnewFolder
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/newFolder' % object_id, input_params, always_retry=always_retry, **kwargs)
def project_remove_folder(object_id, input_params=None, always_retry=False, **kwargs):
    """
    Invokes the /project-xxxx/removeFolder API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry (False by default here)

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Folders-and-Deletion#API-method%3A-%2Fclass-xxxx%2FremoveFolder
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/removeFolder' % object_id, input_params, always_retry=always_retry, **kwargs)
def project_remove_objects(object_id, input_params=None, always_retry=False, **kwargs):
    """
    Invokes the /project-xxxx/removeObjects API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry (False by default here)

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Folders-and-Deletion#API-method%3A-%2Fclass-xxxx%2FremoveObjects
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/removeObjects' % object_id, input_params, always_retry=always_retry, **kwargs)
def project_remove_tags(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /project-xxxx/removeTags API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Projects#API-method%3A-%2Fproject-xxxx%2FremoveTags
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/removeTags' % object_id, input_params, always_retry=always_retry, **kwargs)
def project_rename_folder(object_id, input_params=None, always_retry=False, **kwargs):
    """
    Invokes the /project-xxxx/renameFolder API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry (False by default here)

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Folders-and-Deletion#API-method%3A-%2Fclass-xxxx%2FrenameFolder
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/renameFolder' % object_id, input_params, always_retry=always_retry, **kwargs)
def project_set_properties(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /project-xxxx/setProperties API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Projects#API-method%3A-%2Fproject-xxxx%2FsetProperties
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/setProperties' % object_id, input_params, always_retry=always_retry, **kwargs)
def project_transfer(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /project-xxxx/transfer API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Project-Permissions-and-Sharing#API-method%3A-%2Fproject-xxxx%2Ftransfer
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/transfer' % object_id, input_params, always_retry=always_retry, **kwargs)
def project_update(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /project-xxxx/update API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Projects#API-method%3A-%2Fproject-xxxx%2Fupdate
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/update' % object_id, input_params, always_retry=always_retry, **kwargs)
def project_update_sponsorship(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /project-xxxx/updateSponsorship API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Projects#API-method%3A-%2Fproject-xxxx%2FupdateSponsorship
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/updateSponsorship' % object_id, input_params, always_retry=always_retry, **kwargs)
def project_new(input_params=None, always_retry=False, **kwargs):
    """
    Invokes the /project/new API method.

    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry (False: creation is not idempotent)

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Projects#API-method%3A-%2Fproject%2Fnew
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/project/new', input_params, always_retry=always_retry, **kwargs)
def record_add_tags(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /record-xxxx/addTags API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Tags#API-method%3A-%2Fclass-xxxx%2FaddTags
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/addTags' % object_id, input_params, always_retry=always_retry, **kwargs)
def record_add_types(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /record-xxxx/addTypes API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Types#API-method%3A-%2Fclass-xxxx%2FaddTypes
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/addTypes' % object_id, input_params, always_retry=always_retry, **kwargs)
def record_close(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /record-xxxx/close API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Data-Object-Lifecycle#API-method%3A-%2Fclass-xxxx%2Fclose
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/close' % object_id, input_params, always_retry=always_retry, **kwargs)
def record_describe(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /record-xxxx/describe API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Records#API-method%3A-%2Frecord-xxxx%2Fdescribe
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/describe' % object_id, input_params, always_retry=always_retry, **kwargs)
def record_get_details(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /record-xxxx/getDetails API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Details-and-Links#API-method%3A-%2Fclass-xxxx%2FgetDetails
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/getDetails' % object_id, input_params, always_retry=always_retry, **kwargs)
def record_list_projects(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /record-xxxx/listProjects API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Cloning#API-method%3A-%2Fclass-xxxx%2FlistProjects
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/listProjects' % object_id, input_params, always_retry=always_retry, **kwargs)
def record_remove_tags(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /record-xxxx/removeTags API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Tags#API-method%3A-%2Fclass-xxxx%2FremoveTags
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/removeTags' % object_id, input_params, always_retry=always_retry, **kwargs)
def record_remove_types(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /record-xxxx/removeTypes API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Types#API-method%3A-%2Fclass-xxxx%2FremoveTypes
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/removeTypes' % object_id, input_params, always_retry=always_retry, **kwargs)
def record_rename(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /record-xxxx/rename API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Name#API-method%3A-%2Fclass-xxxx%2Frename
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/rename' % object_id, input_params, always_retry=always_retry, **kwargs)
def record_set_details(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /record-xxxx/setDetails API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Details-and-Links#API-method%3A-%2Fclass-xxxx%2FsetDetails
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/setDetails' % object_id, input_params, always_retry=always_retry, **kwargs)
def record_set_properties(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /record-xxxx/setProperties API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Properties#API-method%3A-%2Fclass-xxxx%2FsetProperties
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/setProperties' % object_id, input_params, always_retry=always_retry, **kwargs)
def record_set_visibility(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /record-xxxx/setVisibility API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Visibility#API-method%3A-%2Fclass-xxxx%2FsetVisibility
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/setVisibility' % object_id, input_params, always_retry=always_retry, **kwargs)
def record_new(input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /record/new API method.

    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Records#API-method%3A-%2Frecord%2Fnew
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    # Add a nonce to the payload (see Nonce.update_nonce) so retries are idempotent.
    input_params_cp = Nonce.update_nonce(input_params)
    return DXHTTPRequest('/record/new', input_params_cp, always_retry=always_retry, **kwargs)
def system_describe_data_objects(input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /system/describeDataObjects API method.

    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/System-Methods#API-method:-/system/describeDataObjects
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/system/describeDataObjects', input_params, always_retry=always_retry, **kwargs)
def system_describe_executions(input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /system/describeExecutions API method.

    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/System-Methods#API-method:-/system/describeExecutions
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/system/describeExecutions', input_params, always_retry=always_retry, **kwargs)
def system_describe_projects(input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /system/describeProjects API method.

    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/System-Methods#API-method:-/system/describeProjects
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/system/describeProjects', input_params, always_retry=always_retry, **kwargs)
def system_find_affiliates(input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /system/findAffiliates API method.

    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/system/findAffiliates', input_params, always_retry=always_retry, **kwargs)
def system_find_apps(input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /system/findApps API method.

    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Search#API-method%3A-%2Fsystem%2FfindApps
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/system/findApps', input_params, always_retry=always_retry, **kwargs)
def system_find_data_objects(input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /system/findDataObjects API method.

    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Search#API-method%3A-%2Fsystem%2FfindDataObjects
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/system/findDataObjects', input_params, always_retry=always_retry, **kwargs)
def system_find_global_workflows(input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /system/findGlobalWorkflows API method.

    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/system/findGlobalWorkflows', input_params, always_retry=always_retry, **kwargs)
def system_resolve_data_objects(input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /system/resolveDataObjects API method.

    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/System-Methods#API-method:-/system/resolveDataObjects
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/system/resolveDataObjects', input_params, always_retry=always_retry, **kwargs)
def system_find_executions(input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /system/findExecutions API method.

    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Search#API-method%3A-%2Fsystem%2FfindExecutions
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/system/findExecutions', input_params, always_retry=always_retry, **kwargs)
def system_find_analyses(input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /system/findAnalyses API method.

    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Search#API-method%3A-%2Fsystem%2FfindAnalyses
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/system/findAnalyses', input_params, always_retry=always_retry, **kwargs)
def system_find_databases(input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /system/findDatabases API method.

    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/system/findDatabases', input_params, always_retry=always_retry, **kwargs)
def system_find_jobs(input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /system/findJobs API method.

    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Search#API-method%3A-%2Fsystem%2FfindJobs
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/system/findJobs', input_params, always_retry=always_retry, **kwargs)
def system_find_projects(input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /system/findProjects API method.

    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Search#API-method%3A-%2Fsystem%2FfindProjects
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/system/findProjects', input_params, always_retry=always_retry, **kwargs)
def system_find_users(input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /system/findUsers API method.

    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Search#API-method%3A-%2Fsystem%2FfindUsers
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/system/findUsers', input_params, always_retry=always_retry, **kwargs)
def system_find_project_members(input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /system/findProjectMembers API method.

    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Search#API-method:-/system/findProjectMembers
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/system/findProjectMembers', input_params, always_retry=always_retry, **kwargs)
def system_find_orgs(input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /system/findOrgs API method.

    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Search#API-method:-/system/findOrgs
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/system/findOrgs', input_params, always_retry=always_retry, **kwargs)
def system_generate_batch_inputs(input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /system/generateBatchInputs API method.

    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Search#API-method:-/system/generateBatchInputs
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/system/generateBatchInputs', input_params, always_retry=always_retry, **kwargs)
def system_global_search(input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /system/globalSearch API method.

    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Search#API-method:-/system/globalSearch
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/system/globalSearch', input_params, always_retry=always_retry, **kwargs)
def system_greet(input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /system/greet API method.

    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/system/greet', input_params, always_retry=always_retry, **kwargs)
def system_headers(input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /system/headers API method.

    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/system/headers', input_params, always_retry=always_retry, **kwargs)
def system_shorten_url(input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /system/shortenURL API method.

    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/system/shortenURL', input_params, always_retry=always_retry, **kwargs)
def system_whoami(input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /system/whoami API method.

    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/system/whoami', input_params, always_retry=always_retry, **kwargs)
def user_describe(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /user-xxxx/describe API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Users#API-method%3A-%2Fuser-xxxx%2Fdescribe
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/describe' % object_id, input_params, always_retry=always_retry, **kwargs)
def user_update(object_id, input_params=None, always_retry=False, **kwargs):
    """
    Invokes the /user-xxxx/update API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry (False by default here)

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Users#API-method%3A-%2Fuser-xxxx%2Fupdate
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/update' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_add_stage(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /workflow-xxxx/addStage API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fworkflow-xxxx%2FaddStage
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/addStage' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_add_tags(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /workflow-xxxx/addTags API method.

    :param object_id: DNAnexus ID of the object to act on
    :param input_params: request payload dict (defaults to empty)
    :param always_retry: whether the request is safe to retry

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Tags#API-method%3A-%2Fclass-xxxx%2FaddTags
    """
    # Use a None sentinel instead of a shared mutable {} default (Python gotcha).
    input_params = {} if input_params is None else input_params
    return DXHTTPRequest('/%s/addTags' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_add_types(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /workflow-xxxx/addTypes API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Types#API-method%3A-%2Fclass-xxxx%2FaddTypes

    :param object_id: DNAnexus ID of the workflow ("workflow-xxxx")
    :param input_params: API request payload; None means an empty dict
    :param always_retry: True if the request is safe to retry
    """
    # None default avoids sharing one mutable dict across all calls.
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/addTypes' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_close(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /workflow-xxxx/close API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Data-Object-Lifecycle#API-method%3A-%2Fclass-xxxx%2Fclose

    :param object_id: DNAnexus ID of the workflow ("workflow-xxxx")
    :param input_params: API request payload; None means an empty dict
    :param always_retry: True if the request is safe to retry
    """
    # None default avoids sharing one mutable dict across all calls.
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/close' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_describe(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /workflow-xxxx/describe API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fworkflow-xxxx%2Fdescribe

    :param object_id: DNAnexus ID of the workflow ("workflow-xxxx")
    :param input_params: API request payload; None means an empty dict
    :param always_retry: True if the request is safe to retry
    """
    # None default avoids sharing one mutable dict across all calls.
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/describe' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_dry_run(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /workflow-xxxx/dryRun API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fworkflow-xxxx%2FdryRun

    :param object_id: DNAnexus ID of the workflow ("workflow-xxxx")
    :param input_params: API request payload; None means an empty dict
    :param always_retry: True if the request is safe to retry
    """
    # None default avoids sharing one mutable dict across all calls.
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/dryRun' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_get_details(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /workflow-xxxx/getDetails API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Details-and-Links#API-method%3A-%2Fclass-xxxx%2FgetDetails

    :param object_id: DNAnexus ID of the workflow ("workflow-xxxx")
    :param input_params: API request payload; None means an empty dict
    :param always_retry: True if the request is safe to retry
    """
    # None default avoids sharing one mutable dict across all calls.
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/getDetails' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_is_stage_compatible(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /workflow-xxxx/isStageCompatible API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fworkflow-xxxx%2FisStageCompatible

    :param object_id: DNAnexus ID of the workflow ("workflow-xxxx")
    :param input_params: API request payload; None means an empty dict
    :param always_retry: True if the request is safe to retry
    """
    # None default avoids sharing one mutable dict across all calls.
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/isStageCompatible' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_list_projects(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /workflow-xxxx/listProjects API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Cloning#API-method%3A-%2Fclass-xxxx%2FlistProjects

    :param object_id: DNAnexus ID of the workflow ("workflow-xxxx")
    :param input_params: API request payload; None means an empty dict
    :param always_retry: True if the request is safe to retry
    """
    # None default avoids sharing one mutable dict across all calls.
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/listProjects' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_move_stage(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /workflow-xxxx/moveStage API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fworkflow-xxxx%2FmoveStage

    :param object_id: DNAnexus ID of the workflow ("workflow-xxxx")
    :param input_params: API request payload; None means an empty dict
    :param always_retry: True if the request is safe to retry
    """
    # None default avoids sharing one mutable dict across all calls.
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/moveStage' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_overwrite(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /workflow-xxxx/overwrite API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fworkflow-xxxx%2Foverwrite

    :param object_id: DNAnexus ID of the workflow ("workflow-xxxx")
    :param input_params: API request payload; None means an empty dict
    :param always_retry: True if the request is safe to retry
    """
    # None default avoids sharing one mutable dict across all calls.
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/overwrite' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_remove_stage(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /workflow-xxxx/removeStage API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fworkflow-xxxx%2FremoveStage

    :param object_id: DNAnexus ID of the workflow ("workflow-xxxx")
    :param input_params: API request payload; None means an empty dict
    :param always_retry: True if the request is safe to retry
    """
    # None default avoids sharing one mutable dict across all calls.
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/removeStage' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_remove_tags(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /workflow-xxxx/removeTags API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Tags#API-method%3A-%2Fclass-xxxx%2FremoveTags

    :param object_id: DNAnexus ID of the workflow ("workflow-xxxx")
    :param input_params: API request payload; None means an empty dict
    :param always_retry: True if the request is safe to retry
    """
    # None default avoids sharing one mutable dict across all calls.
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/removeTags' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_remove_types(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /workflow-xxxx/removeTypes API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Types#API-method%3A-%2Fclass-xxxx%2FremoveTypes

    :param object_id: DNAnexus ID of the workflow ("workflow-xxxx")
    :param input_params: API request payload; None means an empty dict
    :param always_retry: True if the request is safe to retry
    """
    # None default avoids sharing one mutable dict across all calls.
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/removeTypes' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_rename(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /workflow-xxxx/rename API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Name#API-method%3A-%2Fclass-xxxx%2Frename

    :param object_id: DNAnexus ID of the workflow ("workflow-xxxx")
    :param input_params: API request payload; None means an empty dict
    :param always_retry: True if the request is safe to retry
    """
    # None default avoids sharing one mutable dict across all calls.
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/rename' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_run(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /workflow-xxxx/run API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fworkflow-xxxx%2Frun

    :param object_id: DNAnexus ID of the workflow ("workflow-xxxx")
    :param input_params: API request payload; None means an empty dict
    :param always_retry: True if the request is safe to retry
    """
    # None default avoids sharing one mutable dict across all calls.
    if input_params is None:
        input_params = {}
    # Nonce.update_nonce produces the payload actually sent; the "_cp"
    # naming suggests it is a copy with a nonce added so that retries of
    # this non-idempotent call are deduplicated server-side — confirm
    # against the Nonce implementation.
    input_params_cp = Nonce.update_nonce(input_params)
    return DXHTTPRequest('/%s/run' % object_id, input_params_cp, always_retry=always_retry, **kwargs)
def workflow_validate_batch(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /workflow-xxxx/validateBatch API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fworkflow-xxxx%2FvalidateBatch

    :param object_id: DNAnexus ID of the workflow ("workflow-xxxx")
    :param input_params: API request payload; None means an empty dict
    :param always_retry: True if the request is safe to retry
    """
    # None default avoids sharing one mutable dict across all calls.
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/validateBatch' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_set_details(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /workflow-xxxx/setDetails API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Details-and-Links#API-method%3A-%2Fclass-xxxx%2FsetDetails

    :param object_id: DNAnexus ID of the workflow ("workflow-xxxx")
    :param input_params: API request payload; None means an empty dict
    :param always_retry: True if the request is safe to retry
    """
    # None default avoids sharing one mutable dict across all calls.
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/setDetails' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_set_properties(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /workflow-xxxx/setProperties API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Properties#API-method%3A-%2Fclass-xxxx%2FsetProperties

    :param object_id: DNAnexus ID of the workflow ("workflow-xxxx")
    :param input_params: API request payload; None means an empty dict
    :param always_retry: True if the request is safe to retry
    """
    # None default avoids sharing one mutable dict across all calls.
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/setProperties' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_set_stage_inputs(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /workflow-xxxx/setStageInputs API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fworkflow-xxxx%2FsetStageInputs

    :param object_id: DNAnexus ID of the workflow ("workflow-xxxx")
    :param input_params: API request payload; None means an empty dict
    :param always_retry: True if the request is safe to retry
    """
    # None default avoids sharing one mutable dict across all calls.
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/setStageInputs' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_set_visibility(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /workflow-xxxx/setVisibility API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Visibility#API-method%3A-%2Fclass-xxxx%2FsetVisibility

    :param object_id: DNAnexus ID of the workflow ("workflow-xxxx")
    :param input_params: API request payload; None means an empty dict
    :param always_retry: True if the request is safe to retry
    """
    # None default avoids sharing one mutable dict across all calls.
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/setVisibility' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_update(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /workflow-xxxx/update API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fworkflow-xxxx%2Fupdate

    :param object_id: DNAnexus ID of the workflow ("workflow-xxxx")
    :param input_params: API request payload; None means an empty dict
    :param always_retry: True if the request is safe to retry
    """
    # None default avoids sharing one mutable dict across all calls.
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/update' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_update_stage_executable(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /workflow-xxxx/updateStageExecutable API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fworkflow-xxxx%2FupdateStageExecutable

    :param object_id: DNAnexus ID of the workflow ("workflow-xxxx")
    :param input_params: API request payload; None means an empty dict
    :param always_retry: True if the request is safe to retry
    """
    # None default avoids sharing one mutable dict across all calls.
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/updateStageExecutable' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_new(input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /workflow/new API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fworkflow%2Fnew

    :param input_params: API request payload; None means an empty dict
    :param always_retry: True if the request is safe to retry
    """
    # None default avoids sharing one mutable dict across all calls.
    if input_params is None:
        input_params = {}
    # Nonce.update_nonce produces the payload actually sent; the "_cp"
    # naming suggests a copy with a nonce added for retry deduplication —
    # confirm against the Nonce implementation.
    input_params_cp = Nonce.update_nonce(input_params)
    return DXHTTPRequest('/workflow/new', input_params_cp, always_retry=always_retry, **kwargs)
| 48.846615
| 158
| 0.733554
| 11,049
| 81,525
| 5.226717
| 0.027785
| 0.116571
| 0.116571
| 0.150857
| 0.945662
| 0.939082
| 0.937593
| 0.93484
| 0.927203
| 0.896519
| 0
| 0.015155
| 0.121018
| 81,525
| 1,668
| 159
| 48.875899
| 0.790745
| 0.419969
| 0
| 0.415217
| 1
| 0
| 0.068624
| 0.012059
| 0
| 0
| 0
| 0
| 0
| 1
| 0.443478
| false
| 0
| 0.008696
| 0
| 0.895652
| 0.002174
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b197d470efff118b359dcf9674cc481f84e33dee
| 9,604
|
py
|
Python
|
carbondesign/tests/test_tile_html.py
|
dozymoe/django-carbondesign
|
34aed0cfdccfa90fcb5bf2bbd347229815f1417b
|
[
"MIT"
] | null | null | null |
carbondesign/tests/test_tile_html.py
|
dozymoe/django-carbondesign
|
34aed0cfdccfa90fcb5bf2bbd347229815f1417b
|
[
"MIT"
] | null | null | null |
carbondesign/tests/test_tile_html.py
|
dozymoe/django-carbondesign
|
34aed0cfdccfa90fcb5bf2bbd347229815f1417b
|
[
"MIT"
] | null | null | null |
# pylint:disable=missing-module-docstring,missing-class-docstring,missing-function-docstring,line-too-long
from django import forms
#-
from .base import compare_template, SimpleTestCase
class DummyForm(forms.Form):
    # Minimal fixture form for the TileSelect tests below: one optional
    # ChoiceField whose two choices ('tile', 'tile-2') supply the values
    # rendered by {% TileSelect %}.
    tiles = forms.ChoiceField(required=False,
            label="Number input label",
            choices=(
                ('tile', "tile"),
                ('tile-2', "tile-2"),
            ))
class TileHtmlTest(SimpleTestCase):
    """Golden-HTML tests for the carbondesign Tile template tags.

    Each test renders a small template through compare_template and
    asserts the output matches the expected Carbon Design markup
    verbatim (compare_template returns a pair that must be equal).
    """

    # Show full diffs when an HTML comparison fails.
    maxDiff = None

    def test_default(self):
        """A bare {% Tile %} renders a plain bx--tile div."""
        template = """
{% load carbondesign %}
{% Tile %}{% endTile %}
"""
        expected = """
<div class="bx--tile">
</div>
"""
        rendered = compare_template(template, expected)
        self.assertEqual(*rendered)

    def test_clickable(self):
        """mode="clickable" renders an anchor with clickable modifiers."""
        template = """
{% load carbondesign %}
{% Tile mode="clickable" %}{% endTile %}
"""
        expected = """
<a data-tile="clickable" class="bx--tile bx--tile--clickable"
    tabindex="0">
</a>
"""
        rendered = compare_template(template, expected)
        self.assertEqual(*rendered)

    def test_expandable(self):
        """mode="expandable" splits content into above/below-the-fold spans
        and adds the default chevron button."""
        template = """
{% load carbondesign %}
{% Tile mode="expandable" %}
  {% Slot 'above' %}
    <!-- Above the fold content here -->
  {% endSlot %}
  <!-- Rest of the content here -->
{% endTile %}
"""
        expected = """
<div data-tile="expandable" class="bx--tile bx--tile--expandable"
    tabindex="0">
  <button aria-label="expand menu" class="bx--tile__chevron">
    <svg focusable="false" preserveAspectRatio="xMidYMid meet"
        xmlns="http://www.w3.org/2000/svg" fill="currentColor" width="16"
        height="16" viewBox="0 0 16 16" aria-hidden="true">
      <path d="M8 11L3 6 3.7 5.3 8 9.6 12.3 5.3 13 6z"></path>
    </svg>
  </button>
  <div class="bx--tile-content">
    <span data-tile-atf class="bx--tile-content__above-the-fold">
    <!-- Above the fold content here -->
    </span>
    <span class="bx--tile-content__below-the-fold">
  <!-- Rest of the content here -->
    </span>
  </div>
</div>
"""
        rendered = compare_template(template, expected)
        self.assertEqual(*rendered)

    def test_selectable(self):
        """{% TileSelect %} bound to a form field renders the hidden
        checkbox input plus a selectable label with checkmark icon."""
        form = DummyForm(data={'tiles': ''})
        context = {'form': form}
        template = """
{% load carbondesign %}
{% TileSelect form.tiles value="tile" id="tile-id" %}
  <!-- Tile content here -->
{% endTileSelect %}
"""
        expected = """
<input type="checkbox" name="tiles" value="tile" id="tile-id" title="tile" class="bx--tile-input" tabindex="-1" data-tile-input>
<label for="tile-id" class="bx--tile bx--tile--selectable"
    data-tile="selectable" tabindex="0" aria-label="tile">
  <div class="bx--tile__checkmark">
    <svg focusable="false" preserveAspectRatio="xMidYMid meet"
        xmlns="http://www.w3.org/2000/svg" fill="currentColor" width="16"
        height="16" viewBox="0 0 16 16" aria-hidden="true">
      <path d="M8,1C4.1,1,1,4.1,1,8c0,3.9,3.1,7,7,7s7-3.1,7-7C15,4.1,11.9,1,8,1z M7,11L4.3,8.3l0.9-0.8L7,9.3l4-3.9l0.9,0.8L7,11z"></path>
      <path d="M7,11L4.3,8.3l0.9-0.8L7,9.3l4-3.9l0.9,0.8L7,11z" data-icon-path="inner-path" opacity="0"></path>
    </svg>
  </div>
  <div class="bx--tile-content">
  <!-- Tile content here -->
  </div>
</label>
"""
        rendered = compare_template(template, expected, context)
        self.assertEqual(*rendered)

    def test_grid(self):
        """Full-page scenario: {% TileGrid %} with rows/cols mixing
        expandable, selectable (mode="inside"), clickable and plain tiles,
        including custom icon slots overriding the default chevron."""
        form = DummyForm(data={'tiles': ''})
        context = {'form': form}
        template = """
{% load carbondesign %}
{% TileGrid %}
  {% Row %}
    {% Col md=12 %}
      {% Tile mode="expandable" %}
        {% Slot 'above' style="height: 200px" %}
          <!-- Above the fold content here -->
          <h2>Above the fold content here</h2>
        {% endSlot %}
        <!-- Rest of the content here -->
        <h2>Below the fold content here</h2>
        {% Slot 'icon' %}
          <svg width="12" height="7" viewBox="0 0 12 7">
            <path fill-rule="nonzero" d="M6.002 5.55L11.27 0l.726.685L6.003 7 0 .685.726 0z" />
          </svg>
        {% endSlot %}
      {% endTile %}
    {% endCol %}
  {% endRow %}
  {% Row %}
    {% Col sm=2 %}
      {% TileSelect form.tiles mode="inside" value="tile" id="tile-id-1" %}
        <!-- Tile content here -->
      {% endTileSelect %}
    {% endCol %}
    {% Col sm=2 %}
      {% TileSelect form.tiles mode="inside" value="tile-2" id="tile-id-2" %}
        <!-- Tile content here -->
      {% endTileSelect %}
    {% endCol %}
  {% endRow %}
  {% Row %}
    {% Col md=4 sm=4 %}
      {% Tile mode="clickable" %}{% endTile %}
    {% endCol %}
    {% Col md=4 sm=4 %}
      {% Tile mode="clickable" %}{% endTile %}
    {% endCol %}
    {% Col md=4 sm=4 %}
      {% Tile mode="clickable" %}{% endTile %}
    {% endCol %}
  {% endRow %}
  {% Row %}
    {% Col md=4 sm=4 %}
      {% Tile %}{% endTile %}
    {% endCol %}
    {% Col md=4 sm=4 %}
      {% Tile %}{% endTile %}
    {% endCol %}
    {% Col md=4 sm=4 %}
      {% Tile %}{% endTile %}
    {% endCol %}
    {% Col md=4 sm=4 %}
      {% Tile %}{% endTile %}
    {% endCol %}
  {% endRow %}
  {% Row %}
    {% Col lg=4 md=8 %}
      {% Tile %}{% endTile %}
    {% endCol %}
    {% Col lg=4 md=8 %}
      {% Tile %}{% endTile %}
    {% endCol %}
    {% Col lg=4 md=8 %}
      {% Tile %}{% endTile %}
    {% endCol %}
    {% Col lg=4 md=8 %}
      {% Tile %}{% endTile %}
    {% endCol %}
  {% endRow %}
  {% Row %}
    {% Col lg=16 %}
      {% Tile %}{% endTile %}
    {% endCol %}
  {% endRow %}
  {% Row %}
    {% Col md=5 sm=2 %}
      {% Tile %}{% endTile %}
    {% endCol %}
    {% Col md=3 sm=2 %}
      {% Tile %}{% endTile %}
    {% endCol %}
  {% endRow %}
{% endTileGrid %}
"""
        expected = """
<div class="bx--grid">
  <div class="bx--tile-container" style="width: 100%">
    <div class="bx--row">
      <div class="bx--col bx--col-md-12">
        <div data-tile="expandable" class="bx--tile bx--tile--expandable"
            tabindex="0">
          <button aria-label="expand menu" class="bx--tile__chevron">
          <svg width="12" height="7" viewBox="0 0 12 7">
            <path fill-rule="nonzero" d="M6.002 5.55L11.27 0l.726.685L6.003 7 0 .685.726 0z" />
          </svg>
          </button>
          <div class="bx--tile-content">
            <span data-tile-atf class="bx--tile-content__above-the-fold" style="height: 200px">
          <!-- Above the fold content here -->
          <h2>Above the fold content here</h2>
            </span>
            <span class="bx--tile-content__below-the-fold">
        <!-- Rest of the content here -->
        <h2>Below the fold content here</h2>
            </span>
          </div>
        </div>
      </div>
    </div>
    <div class="bx--row">
      <div class="bx--col bx--col-sm-2">
        <label class="bx--tile bx--tile--selectable"
            data-tile="selectable" tabindex="0" aria-label="tile">
          <input type="checkbox" name="tiles" value="tile" id="tile-id-1" title="tile" class="bx--tile-input" tabindex="-1" data-tile-input>
          <div class="bx--tile__checkmark">
            <svg width="16" height="16" viewBox="0 0 16 16">
              <path d="M8 16A8 8 0 1 1 8 0a8 8 0 0 1 0 16zm3.646-10.854L6.75 10.043 4.354 7.646l-.708.708 3.104 3.103 5.604-5.603-.708-.708z"
                  fill-rule="evenodd" />
            </svg>
          </div>
          <div class="bx--tile-content">
        <!-- Tile content here -->
          </div>
        </label>
      </div>
      <div class="bx--col bx--col-sm-2">
        <label class="bx--tile bx--tile--selectable"
            data-tile="selectable" tabindex="0" aria-label="tile-2">
          <input type="checkbox" name="tiles" value="tile-2" id="tile-id-2" title="tile-2" class="bx--tile-input" tabindex="-1" data-tile-input>
          <div class="bx--tile__checkmark">
            <svg width="16" height="16" viewBox="0 0 16 16">
              <path d="M8 16A8 8 0 1 1 8 0a8 8 0 0 1 0 16zm3.646-10.854L6.75 10.043 4.354 7.646l-.708.708 3.104 3.103 5.604-5.603-.708-.708z"
                  fill-rule="evenodd" />
            </svg>
          </div>
          <div class="bx--tile-content">
        <!-- Tile content here -->
          </div>
        </label>
      </div>
    </div>
    <div class="bx--row">
      <div class="bx--col bx--col-sm-4 bx--col-md-4">
        <a data-tile="clickable" class="bx--tile bx--tile--clickable"
            tabindex="0">
        </a>
      </div>
      <div class="bx--col bx--col-sm-4 bx--col-md-4">
        <a data-tile="clickable" class="bx--tile bx--tile--clickable"
            tabindex="0">
        </a>
      </div>
      <div class="bx--col bx--col-sm-4 bx--col-md-4">
        <a data-tile="clickable" class="bx--tile bx--tile--clickable"
            tabindex="0">
        </a>
      </div>
    </div>
    <div class="bx--row">
      <div class="bx--col bx--col-sm-4 bx--col-md-4">
        <div class="bx--tile">
        </div>
      </div>
      <div class="bx--col bx--col-sm-4 bx--col-md-4">
        <div class="bx--tile">
        </div>
      </div>
      <div class="bx--col bx--col-sm-4 bx--col-md-4">
        <div class="bx--tile">
        </div>
      </div>
      <div class="bx--col bx--col-sm-4 bx--col-md-4">
        <div class="bx--tile">
        </div>
      </div>
    </div>
    <div class="bx--row">
      <div class="bx--col bx--col-md-8 bx--col-lg-4">
        <div class="bx--tile">
        </div>
      </div>
      <div class="bx--col bx--col-md-8 bx--col-lg-4">
        <div class="bx--tile">
        </div>
      </div>
      <div class="bx--col bx--col-md-8 bx--col-lg-4">
        <div class="bx--tile">
        </div>
      </div>
      <div class="bx--col bx--col-md-8 bx--col-lg-4">
        <div class="bx--tile">
        </div>
      </div>
    </div>
    <div class="bx--row">
      <div class="bx--col bx--col-lg-16">
        <div class="bx--tile">
        </div>
      </div>
    </div>
    <div class="bx--row">
      <div class="bx--col bx--col-sm-2 bx--col-md-5">
        <div class="bx--tile">
        </div>
      </div>
      <div class="bx--col bx--col-sm-2 bx--col-md-3">
        <div class="bx--tile">
        </div>
      </div>
    </div>
  </div>
</div>
"""
        rendered = compare_template(template, expected, context)
        self.assertEqual(*rendered)
| 29.10303
| 137
| 0.552166
| 1,370
| 9,604
| 3.849635
| 0.128467
| 0.084945
| 0.08722
| 0.055745
| 0.866515
| 0.813993
| 0.77683
| 0.760903
| 0.755973
| 0.725256
| 0
| 0.063853
| 0.225427
| 9,604
| 329
| 138
| 29.191489
| 0.645114
| 0.010933
| 0
| 0.859873
| 0
| 0.089172
| 0.857835
| 0.122473
| 0
| 0
| 0
| 0
| 0.015924
| 1
| 0.015924
| false
| 0
| 0.006369
| 0
| 0.035032
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
493b8ba6aae04c323af77be57fb417aec63bb89c
| 14,881
|
py
|
Python
|
jb2/config/cooldown.py
|
spirdon/janbot2
|
b75e011f485378d843d09240b93d00005794eaa5
|
[
"MIT"
] | null | null | null |
jb2/config/cooldown.py
|
spirdon/janbot2
|
b75e011f485378d843d09240b93d00005794eaa5
|
[
"MIT"
] | 1
|
2019-10-23T19:17:06.000Z
|
2019-10-23T19:17:06.000Z
|
jb2/config/cooldown.py
|
spirdon/janbot2
|
b75e011f485378d843d09240b93d00005794eaa5
|
[
"MIT"
] | null | null | null |
import asyncio
import random
import re
import threading
import time
import requests
import discord
import jb2.command
import jb2.embed
class CooldownCommand(jb2.command.Command):
    """Shows or (for administrators) updates the per-server cooldown."""

    def get_pattern(self):
        """Command regex: 'cooldown' plus an optional 2-7 digit value."""
        return r'cooldown( ?\d{2,7})?$'

    async def action(self, prefix, message, client):
        """Reply with the current cooldown, or store a new value."""
        content = message.content.strip()
        mention = message.author.mention
        requested = re.match("^" + self.get_full_pattern(prefix), content).group(1)
        server_info = self.connector.get_server(message.server.id)
        if not message.author.server_permissions.administrator:
            text = "Aby wykonać tę operację musisz być Administratorem"
            emb = jb2.embed.error_embed(mention, text)
        elif requested:
            text = "Ustawiono cooldown na **{} s**".format(requested)
            self.connector.set_server_cooldown(message.server.id, requested)
            emb = jb2.embed.success_embed(mention, text)
        else:
            text = "Cooldown: **{} s**".format(server_info['cooldown'])
            emb = jb2.embed.info_embed(mention, text)
        await client.send_message(message.channel, embed=emb)
class RouletteAddCommand(jb2.command.Command):
    """Registers a server role for the roulette (admins only)."""

    def get_pattern(self):
        """Command regex: 'roulette add' plus an optional role name."""
        return r'roulette add( [a-zA-Z0-9_ ]+)?$'

    async def action(self, prefix, message, client):
        """Add the named role to the roulette roles stored for this server.

        Replies with an error embed when the caller is not an
        administrator, the role name is missing, the role is already
        registered, or no such Discord role exists.
        """
        msg = message.content.strip()
        author_m = message.author.mention
        role_name = re.match("^" + self.get_full_pattern(prefix), msg).group(1)
        if not message.author.server_permissions.administrator:
            text = "Aby wykonać tę operację musisz być Administratorem"
            emb = jb2.embed.error_embed(author_m, text)
        elif not role_name:
            text = "Potrzebny parametr: **role**"
            emb = jb2.embed.error_embed(author_m, text)
        else:
            role_name = role_name.strip()
            roles = self.connector.get_server_roles(message.server.id)
            role_names = [r[2] for r in roles]
            role = discord.utils.get(message.server.roles, name=role_name)
            if role_name in role_names or not role:
                # Already registered, or the Discord role does not exist.
                text = "Nie można dodać tej roli: **{}**".format(role_name)
                emb = jb2.embed.error_embed(author_m, text)
            else:
                # `role` is known to exist here, so the original's second
                # discord.utils.get lookup and its unreachable
                # "Dana rola nie istnieje" branch have been removed.
                text = "Dodano rolę: **{}**".format(role_name)
                self.connector.add_role_name(message.server.id,
                                             message.channel.id,
                                             role_name)
                emb = jb2.embed.success_embed(author_m, text)
        await client.send_message(message.channel, embed=emb)
class RouletteDeleteCommand(jb2.command.Command):
    """Unregisters a roulette role (admins only)."""

    def get_pattern(self):
        """Command regex: 'roulette delete' plus an optional role name."""
        return r'roulette delete( [a-zA-Z0-9_ ]+)?$'

    async def action(self, prefix, message, client):
        """Remove the named role from the roulette roles of this server."""
        content = message.content.strip()
        mention = message.author.mention
        role_name = re.match("^" + self.get_full_pattern(prefix), content).group(1)
        if not message.author.server_permissions.administrator:
            emb = jb2.embed.error_embed(
                mention, "Aby wykonać tę operację musisz być Administratorem")
        elif role_name is None:
            emb = jb2.embed.error_embed(mention, "Potrzebny parametr: **role**")
        else:
            registered = self.connector.get_server_roles(message.server.id)
            role_name = role_name.strip()
            known_names = [entry[2] for entry in registered]
            role = discord.utils.get(message.server.roles, name=role_name)
            if role_name not in known_names or not role:
                emb = jb2.embed.error_embed(
                    mention, "Nie można usunąć podanej roli")
            else:
                self.connector.delete_role_name(message.server.id,
                                                role_name)
                emb = jb2.embed.success_embed(
                    mention, "Usunięto rolę: **{}**".format(role_name))
        await client.send_message(message.channel, embed=emb)
class RouletteStextsCommand(jb2.command.Command):
    """Sets the start-texts URL for a roulette role (admins only)."""

    def get_pattern(self):
        """Command regex: 'roulette stexts <role>, <url>'."""
        return r'roulette stexts( [a-zA-Z0-9_ ]+)?(,)?( ?.+)?$'

    async def action(self, prefix, message, client):
        """Store the given URL as the start-text source for the role.

        Requires administrator permissions and a comma between the two
        arguments; replies with a success or error embed.
        """
        msg = message.content.strip()
        author_m = message.author.mention
        # Match once and reuse the groups (the original re-ran the same
        # regex three times and fetched the role list redundantly before
        # the permission check).
        match = re.match("^" + self.get_full_pattern(prefix), msg)
        role_name = match.group(1)
        comma = match.group(2)
        url = match.group(3)
        if not message.author.server_permissions.administrator:
            text = "Aby wykonać tę operację musisz być Administratorem"
            emb = jb2.embed.error_embed(author_m, text)
        elif not role_name:
            text = "Potrzebny parametr: **role**"
            emb = jb2.embed.error_embed(author_m, text)
        elif not url:
            text = "Potrzebny parametr: **url**"
            emb = jb2.embed.error_embed(author_m, text)
        elif not comma:
            text = "Potrzebny przecinek pomiędzy argumentami"
            emb = jb2.embed.error_embed(author_m, text)
        else:
            url = url.strip()
            role_name = role_name.strip()
            roles = self.connector.get_server_roles(message.server.id)
            role_names = [r[2] for r in roles]
            role = discord.utils.get(message.server.roles, name=role_name)
            if role_name not in role_names or not role:
                text = "Nie można ustawić tekstu do tej roli: **{}**"
                emb = jb2.embed.error_embed(author_m, text.format(role_name))
            else:
                # `role` exists here, so the original's second
                # discord.utils.get lookup and its unreachable
                # "Dana rola nie istnieje" branch have been removed.
                text = "Ustawiono tekst do roli: **{}**".format(role_name)
                self.connector.set_role_stexts(message.server.id,
                                               role_name,
                                               url)
                emb = jb2.embed.success_embed(author_m, text)
        await client.send_message(message.channel, embed=emb)
class RouletteTextsCommand(jb2.command.Command):
    """Sets the reminder-texts URL for a roulette role (admins only)."""

    def get_pattern(self):
        """Command regex: 'roulette texts <role>, <url>'."""
        return r'roulette texts( [a-zA-Z0-9_ ]+)?(,)?( ?.+)?$'

    async def action(self, prefix, message, client):
        """Store the given URL as the reminder-text source for the role.

        Requires administrator permissions and a comma between the two
        arguments; replies with a success or error embed.
        """
        msg = message.content.strip()
        author_m = message.author.mention
        # Match once and reuse the groups (the original re-ran the same
        # regex three times and fetched the role list redundantly before
        # the permission check).
        match = re.match("^" + self.get_full_pattern(prefix), msg)
        role_name = match.group(1)
        comma = match.group(2)
        url = match.group(3)
        if not message.author.server_permissions.administrator:
            text = "Aby wykonać tę operację musisz być Administratorem"
            emb = jb2.embed.error_embed(author_m, text)
        elif not role_name:
            text = "Potrzebny parametr: **role**"
            emb = jb2.embed.error_embed(author_m, text)
        elif not url:
            text = "Potrzebny parametr: **url**"
            emb = jb2.embed.error_embed(author_m, text)
        elif not comma:
            text = "Potrzebny przecinek pomiędzy argumentami"
            emb = jb2.embed.error_embed(author_m, text)
        else:
            url = url.strip()
            role_name = role_name.strip()
            roles = self.connector.get_server_roles(message.server.id)
            role_names = [r[2] for r in roles]
            role = discord.utils.get(message.server.roles, name=role_name)
            if role_name not in role_names or not role:
                text = "Nie można ustawić tekstu do tej roli: **{}**"
                emb = jb2.embed.error_embed(author_m, text.format(role_name))
            else:
                # `role` exists here, so the original's second
                # discord.utils.get lookup and its unreachable
                # "Dana rola nie istnieje" branch have been removed.
                text = "Ustawiono tekst do roli: **{}**".format(role_name)
                self.connector.set_role_texts(message.server.id,
                                              role_name,
                                              url)
                emb = jb2.embed.success_embed(author_m, text)
        await client.send_message(message.channel, embed=emb)
class RouletteEtextsCommand(jb2.command.Command):
    """Sets the expiry-texts URL for a roulette role (admins only)."""

    def get_pattern(self):
        """Command regex: 'roulette etexts <role>, <url>'."""
        return r'roulette etexts( [a-zA-Z0-9_ ]+)?(,)?( ?.+)?$'

    async def action(self, prefix, message, client):
        """Store the given URL as the expiry-text source for the role.

        Requires administrator permissions and a comma between the two
        arguments; replies with a success or error embed.
        """
        msg = message.content.strip()
        author_m = message.author.mention
        # Match once and reuse the groups (the original re-ran the same
        # regex three times and fetched the role list redundantly before
        # the permission check).
        match = re.match("^" + self.get_full_pattern(prefix), msg)
        role_name = match.group(1)
        comma = match.group(2)
        url = match.group(3)
        if not message.author.server_permissions.administrator:
            text = "Aby wykonać tę operację musisz być Administratorem"
            emb = jb2.embed.error_embed(author_m, text)
        elif not role_name:
            text = "Potrzebny parametr: **role**"
            emb = jb2.embed.error_embed(author_m, text)
        elif not url:
            text = "Potrzebny parametr: **url**"
            emb = jb2.embed.error_embed(author_m, text)
        elif not comma:
            text = "Potrzebny przecinek pomiędzy argumentami"
            emb = jb2.embed.error_embed(author_m, text)
        else:
            url = url.strip()
            role_name = role_name.strip()
            roles = self.connector.get_server_roles(message.server.id)
            role_names = [r[2] for r in roles]
            role = discord.utils.get(message.server.roles, name=role_name)
            if role_name not in role_names or not role:
                text = "Nie można ustawić tekstu do tej roli: **{}**"
                emb = jb2.embed.error_embed(author_m, text.format(role_name))
            else:
                # `role` exists here, so the original's second
                # discord.utils.get lookup and its unreachable
                # "Dana rola nie istnieje" branch have been removed.
                text = "Ustawiono tekst do roli: **{}**".format(role_name)
                self.connector.set_role_etexts(message.server.id,
                                               role_name,
                                               url)
                emb = jb2.embed.success_embed(author_m, text)
        await client.send_message(message.channel, embed=emb)
class RoleListener:
    """Hourly background task that reminds roulette-role owners and
    strips expired roles, posting an embed to the roulette channel."""

    def __init__(self, connector, client):
        # connector: persistence layer (get_all_roles, get_server, ...).
        # client: the Discord client used to send messages / edit roles.
        self.connector = connector
        self.client = client

    async def listen(self):
        """Run forever: once per hour, process every stored role.

        For each stored role tuple, either posts an hourly reminder (if
        the role has not yet expired) or clears the owner, removes the
        Discord role and posts an expiry message.

        NOTE(review): requests.get is a blocking call inside an async
        loop — it stalls the event loop while the HTTP request runs;
        consider an async HTTP client or an executor.
        """
        while True:
            all_roles = self.connector.get_all_roles()
            roulette_channels = self.connector.get_all_roulette_channels()
            for role in all_roles:
                # Role rows are indexed tuples; indices below are assumed
                # from usage: 0=server, 1=channel, 2=name, 3=owner,
                # 5=expiry timestamp, 7=texts URL, 8=etexts URL — confirm
                # against the connector's schema.
                server_id = role[0]
                channel_id = role[1]
                role_name = role[2]
                owner_id = role[3]
                prefix = self.connector.get_server(server_id)['prefix']
                # Prefixes are stored escaped; unescape for display.
                prefix = prefix.replace('\\', '')
                # Skip roles whose channel is not a roulette channel.
                if channel_id not in roulette_channels:
                    continue
                time_end = role[5]
                texts_url = role[7]
                etexts_url = role[8]
                channel = self.client.get_channel(channel_id)
                server = self.client.get_server(server_id)
                emb = discord.Embed()
                if time.time() < time_end:
                    # Role still active: post the hourly reminder.
                    if not texts_url:
                        # No custom text source configured: use the
                        # default Polish reminder and hint how to set one.
                        footer_text = "Zmień domyślny tekst za pomocą " +\
                                      prefix + "roulette texts " + \
                                      "<role_name>, <url>"
                        desc = "Godzinny reminder, że <@{}> to **{}**!"
                        desc = desc.format(owner_id, role_name)
                        emb.set_footer(text=footer_text)
                    else:
                        # Pick a random line from the configured URL.
                        r = requests.get(texts_url)
                        content = r.text.split('\n')
                        rand_text = random.choice(content)
                        desc = rand_text.format("<@{}>".format(owner_id))
                    emb.description = desc
                    emb.title = ":reminder_ribbon: Ruletka"
                    await self.client.send_message(channel, embed=emb)
                elif owner_id:
                    # Role expired and still owned: clear ownership,
                    # remove the Discord role, and announce the expiry.
                    self.connector.set_role_owner(server_id,
                                                  role_name,
                                                  '')
                    member = server.get_member(owner_id)
                    role = discord.utils.get(server.roles,
                                             name=role_name)
                    await self.client.remove_roles(member, role)
                    if not etexts_url:
                        footer_text = "Zmień domyślny tekst za pomocą " +\
                                      prefix + "roulette etexts " + \
                                      "<role_name>, <url>"
                        desc = "<@{}> już nie ma roli **{}**!"
                        desc = desc.format(owner_id, role_name)
                        emb.set_footer(text=footer_text)
                    else:
                        # Pick a random line from the configured URL.
                        r = requests.get(etexts_url)
                        content = r.text.split('\n')
                        rand_text = random.choice(content)
                        desc = rand_text.format("<@{}>".format(owner_id))
                    emb.description = desc
                    emb.title = ":reminder_ribbon: Ruletka"
                    await self.client.send_message(channel, embed=emb)
            # One pass per hour.
            await asyncio.sleep(3600)
| 45.368902
| 79
| 0.532827
| 1,640
| 14,881
| 4.667073
| 0.095122
| 0.059577
| 0.047426
| 0.068984
| 0.826757
| 0.818004
| 0.806768
| 0.806768
| 0.806768
| 0.79723
| 0
| 0.008588
| 0.366172
| 14,881
| 327
| 80
| 45.507645
| 0.802905
| 0
| 0
| 0.710247
| 0
| 0
| 0.108259
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.024735
| false
| 0
| 0.031802
| 0.021201
| 0.102474
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
494b127f2fd8471e3ac8f1133a5ef218893ca386
| 2,810
|
py
|
Python
|
models/networks/iunet_utils.py
|
Myyyr/segmentation
|
6b9423e327cff1eb23599404031b7fb8e9ecf75d
|
[
"MIT"
] | null | null | null |
models/networks/iunet_utils.py
|
Myyyr/segmentation
|
6b9423e327cff1eb23599404031b7fb8e9ecf75d
|
[
"MIT"
] | null | null | null |
models/networks/iunet_utils.py
|
Myyyr/segmentation
|
6b9423e327cff1eb23599404031b7fb8e9ecf75d
|
[
"MIT"
] | null | null | null |
def get_num_channels(input_shape_or_channels):
    """Return the channel count from either a full shape or a bare count.

    Accepts an iterable shape (channels-first) or a plain integer and
    returns the number of channels in both cases.
    """
    if not hasattr(input_shape_or_channels, '__iter__'):
        # Already a bare channel count.
        return input_shape_or_channels
    return input_shape_or_channels[0]
def calculate_shapes_or_channels(
    input_shape_or_channels,
    slice_fraction,
    dim,
    i_level,
    sliced=False):
    """Compute the shape (or channel count) at level *i_level*.

    :param input_shape_or_channels: either a full (channels, *spatial)
        shape of length dim+1, or a bare channel count
    :param slice_fraction: per-level channel slicing factor
    :param dim: number of spatial dimensions
    :param i_level: resolution level (0 returns the input unchanged)
    :param sliced: if True, divide the channel count by slice_fraction
        once more (only meaningful for the shape form)
    :returns: a new shape list, or an int channel count
    """
    def _split_channels(channels):
        # Round the channel count to a multiple of slice_fraction, then
        # apply the per-level growth factor 2**dim / slice_fraction.
        initial_split = (channels // slice_fraction
                         + channels % slice_fraction) * slice_fraction
        return initial_split * 2**(dim*i_level) // (slice_fraction**i_level)

    # If input_shape_or_channels is the input shape
    if hasattr(input_shape_or_channels, '__iter__'):
        assert(len(input_shape_or_channels) == dim+1)
        if i_level == 0:
            return input_shape_or_channels
        # Copy to list to prevent changing the original list.
        output_shape = [i for i in input_shape_or_channels]
        # BUG FIX: the original applied // and % to the whole shape
        # sequence (a TypeError for lists); only the channel entry —
        # output_shape[0] — is meant here, matching the scalar branch.
        output_shape[0] = _split_channels(output_shape[0])
        if sliced:
            output_shape[0] = output_shape[0] // slice_fraction
        resolution_quotient = 2**i_level
        for j in range(1, len(output_shape)):
            output_shape[j] = output_shape[j] // resolution_quotient
        return output_shape
    # If input_shape_or_channels is just the number of channels
    if i_level == 0:
        return input_shape_or_channels
    return _split_channels(input_shape_or_channels)
def calculate_shapes_or_channels_old(
    input_shape_or_channels,
    slice_fraction,
    dim,
    i_level,
    sliced = False):
    """Legacy level-shape computation (kept for reference).

    Scales the channel count by ``2**(dim*i_level) / slice_fraction**i_level``
    and halves each spatial dimension ``i_level`` times, without rounding
    the channel count to a multiple of ``slice_fraction``.
    """
    if not hasattr(input_shape_or_channels, '__iter__'):
        # Plain channel count: scale channels only.
        return (input_shape_or_channels * 2**(dim*i_level) //
                (slice_fraction**i_level))
    # Full input shape: channel entry first, then `dim` spatial entries.
    assert(len(input_shape_or_channels) == dim + 1)
    # Work on a fresh list so the caller's shape stays untouched.
    new_shape = list(input_shape_or_channels)
    new_shape[0] = new_shape[0] * 2**(dim*i_level) // slice_fraction**i_level
    if sliced:
        new_shape[0] //= slice_fraction
    shrink = 2**i_level
    new_shape[1:] = [extent // shrink for extent in new_shape[1:]]
    return new_shape
| 35.56962
| 129
| 0.638434
| 363
| 2,810
| 4.548209
| 0.15427
| 0.15748
| 0.17444
| 0.278619
| 0.910357
| 0.812235
| 0.797698
| 0.77771
| 0.760145
| 0.760145
| 0
| 0.009965
| 0.285765
| 2,810
| 79
| 130
| 35.56962
| 0.812656
| 0.160142
| 0
| 0.846154
| 0
| 0
| 0.010292
| 0
| 0
| 0
| 0
| 0
| 0.038462
| 1
| 0.057692
| false
| 0
| 0
| 0
| 0.211538
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4988e3db8e37f0f69fd6de168fe919341d7d7c62
| 95
|
py
|
Python
|
0x0A-python-inheritance/1-my_list.py
|
calypsobronte/holbertonschool-higher_level_programming
|
c39c060d8473976fa475d22fffba5cb4329c9965
|
[
"MIT"
] | null | null | null |
0x0A-python-inheritance/1-my_list.py
|
calypsobronte/holbertonschool-higher_level_programming
|
c39c060d8473976fa475d22fffba5cb4329c9965
|
[
"MIT"
] | null | null | null |
0x0A-python-inheritance/1-my_list.py
|
calypsobronte/holbertonschool-higher_level_programming
|
c39c060d8473976fa475d22fffba5cb4329c9965
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
class MyList(list):
    """A ``list`` subclass that can print itself in sorted order."""

    def print_sorted(self):
        """Print a sorted copy of the list; the list itself is unchanged."""
        ordered = sorted(self)
        print(ordered)
| 19
| 27
| 0.652632
| 13
| 95
| 4.692308
| 0.769231
| 0.360656
| 0.491803
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012987
| 0.189474
| 95
| 4
| 28
| 23.75
| 0.779221
| 0.178947
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
|
0
| 7
|
b8dbfc88a392ba61c1466df5444d1be16f128740
| 575
|
py
|
Python
|
Plugins/UnrealEnginePython/Binaries/Win64/Lib/site-packages/tensorflow/_api/v1/keras/applications/densenet/__init__.py
|
JustinACoder/H22-GR3-UnrealAI
|
361eb9ef1147f8a2991e5f98c4118cd823184adf
|
[
"MIT"
] | 6
|
2022-02-04T18:12:24.000Z
|
2022-03-21T23:57:12.000Z
|
Lib/site-packages/tensorflow/_api/v1/keras/applications/densenet/__init__.py
|
shfkdroal/Robot-Learning-in-Mixed-Adversarial-and-Collaborative-Settings
|
1fa4cd6a566c8745f455fc3d2273208f21f88ced
|
[
"bzip2-1.0.6"
] | null | null | null |
Lib/site-packages/tensorflow/_api/v1/keras/applications/densenet/__init__.py
|
shfkdroal/Robot-Learning-in-Mixed-Adversarial-and-Collaborative-Settings
|
1fa4cd6a566c8745f455fc3d2273208f21f88ced
|
[
"bzip2-1.0.6"
] | 1
|
2022-02-08T03:53:23.000Z
|
2022-02-08T03:53:23.000Z
|
# This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""DenseNet models for Keras.
"""
from __future__ import print_function
from tensorflow.python.keras.applications import DenseNet121
from tensorflow.python.keras.applications import DenseNet169
from tensorflow.python.keras.applications import DenseNet201
from tensorflow.python.keras.applications.densenet import decode_predictions
from tensorflow.python.keras.applications.densenet import preprocess_input
del print_function
| 35.9375
| 83
| 0.826087
| 71
| 575
| 6.549296
| 0.492958
| 0.206452
| 0.215054
| 0.268817
| 0.496774
| 0.496774
| 0.219355
| 0
| 0
| 0
| 0
| 0.017613
| 0.111304
| 575
| 15
| 84
| 38.333333
| 0.892368
| 0.266087
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.857143
| 0
| 0.857143
| 0.285714
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b8e4fa7d547e1d210cd2782e8b6cf7ff19acd1bd
| 11,701
|
py
|
Python
|
scripts/find_images_upload.py
|
tommyfuu/flower_map_new
|
6488b118c2d41c41829f83087761342c81d0ef8c
|
[
"MIT"
] | 1
|
2021-09-21T21:37:35.000Z
|
2021-09-21T21:37:35.000Z
|
scripts/find_images_upload.py
|
tommyfuu/flower_map_new
|
6488b118c2d41c41829f83087761342c81d0ef8c
|
[
"MIT"
] | null | null | null |
scripts/find_images_upload.py
|
tommyfuu/flower_map_new
|
6488b118c2d41c41829f83087761342c81d0ef8c
|
[
"MIT"
] | null | null | null |
import pandas as pd
from subprocess import Popen, PIPE
def find_images(csv_summary_path, directory_path, output_txt_path, gDrive_path):
    """Expand a CSV summary into image file names and prep an rclone upload.

    Reads ``csv_summary_path`` (columns ``image_name_start`` /
    ``image_name_end`` with values like ``DJI_0001``), expands each
    start/end pair into the zero-padded ``<prefix><NNNN>.JPG`` names in
    between, writes them to ``output_txt_path``, and prints the ``rclone``
    command that would copy them from ``directory_path`` to ``gDrive_path``.
    Nothing is uploaded; the user runs the printed command.

    Returns the list of generated image names.
    """
    summary = pd.read_csv(csv_summary_path)
    start_list = summary['image_name_start'].tolist()
    end_list = summary['image_name_end'].tolist()
    all_contained_image_names = []
    print("prep start, identifying all images")
    for start_name, end_name in zip(start_list, end_list):
        # Everything before the trailing number (underscore included)
        # is the shared file-name prefix.
        prefix = start_name.rsplit("_", 1)[0] + "_"
        start_number = int(start_name.split("_")[-1])
        end_number = int(end_name.split("_")[-1])
        # NOTE(review): the end image itself is excluded (half-open range),
        # mirroring the original behaviour — confirm this is intended.
        for image_number in range(start_number, end_number):
            # Zero-pad to four digits. Unlike the original if/elif chain,
            # zfill also handles numbers with four or more digits instead
            # of silently dropping those images.
            all_contained_image_names.append(
                prefix + str(image_number).zfill(4) + ".JPG")
    print("writing image names to txt file at", output_txt_path)
    with open(output_txt_path, 'w') as f:
        for item in all_contained_image_names:
            f.write("%s\n" % item)
    include_from = '--include-from='+output_txt_path
    process = 'rclone copy '+directory_path+" "+gDrive_path+" "+include_from +" -v"
    print("prep finished, run the following command to upload all chosen images")
    print(process)
    # process = Popen(['rclone', 'copy', directory_path, gDrive_path, include_from, '-v'], stdout=PIPE, stderr=PIPE)
    # stdout, stderr = process.communicate()
    return all_contained_image_names
# find_images('/mnt/biology/donaldson/tom/flower_map_new/070921_NorthHasPlants.csv',
# '/mnt/biology/donaldson/tom/flower_map_new/newData/070921_North',
# '/mnt/biology/donaldson/tom/flower_map_new/useful_images_summaries/0709_summary.txt',
# '"knuthXGDrive:/Bee Lab/Projects/Bee Forage Mapping/Bee Forage Mapping - Tom Thesis/dataToBeLabelled/070921_North"')
# find_images('/mnt/biology/donaldson/tom/flower_map_new/071121_CentralEasternHasPlants.csv',
# '/mnt/biology/donaldson/tom/flower_map_new/newData/071121_CentralEastern',
# '/mnt/biology/donaldson/tom/flower_map_new/useful_images_summaries/0711_CentralEastern_summary.txt',
# '"knuthXGDrive:/Bee Lab/Projects/Bee Forage Mapping/Bee Forage Mapping - Tom Thesis/dataToBeLabelled/071121_CentralEastern"')
# find_images('/mnt/biology/donaldson/tom/flower_map_new/071621_South.csv',
# '/mnt/biology/donaldson/tom/flower_map_new/newData/071621_South',
# '/mnt/biology/donaldson/tom/flower_map_new/useful_images_summaries/071621_South_summary.txt',
# '"knuthXGDrive:/Bee Lab/Projects/Bee Forage Mapping/Bee Forage Mapping - Tom Thesis/dataToBeLabelled/071621_South"')
# find_images('/mnt/biology/donaldson/tom/flower_map_new/071121_Western.csv',
# '/mnt/biology/donaldson/tom/flower_map_new/newData/071121_Western',
# '/mnt/biology/donaldson/tom/flower_map_new/useful_images_summaries/071121_Western_summary.txt',
# '"knuthXGDrive:/Bee Lab/Projects/Bee Forage Mapping/Bee Forage Mapping - Tom Thesis/dataToBeLabelled/071121_Western"')
# find_images('/mnt/biology/donaldson/tom/flower_map_new/2017_6217East.csv',
# '/mnt/biology/donaldson/tom/flower_map/data/Week3/6217East',
# '/mnt/biology/donaldson/tom/flower_map_new/useful_images_summaries/2017_6217East_summary.txt',
# '"knuthXGDrive:/Bee Lab/Projects/Bee Forage Mapping/Bee Forage Mapping - Tom Thesis/dataToBeLabelled/2017_6217East"')
# find_images('/mnt/biology/donaldson/tom/flower_map_new/2017_6617East1.csv',
# '/mnt/biology/donaldson/tom/flower_map/data/Week4/6617East1',
# '/mnt/biology/donaldson/tom/flower_map_new/useful_images_summaries/2017_6617East1_summary.txt',
# '"knuthXGDrive:/Bee Lab/Projects/Bee Forage Mapping/Bee Forage Mapping - Tom Thesis/dataToBeLabelled/2017_6617East1"')
# find_images('/mnt/biology/donaldson/tom/flower_map_new/2017_6617East2.csv',
# '/mnt/biology/donaldson/tom/flower_map/data/Week4/6617East2',
# '/mnt/biology/donaldson/tom/flower_map_new/useful_images_summaries/2017_6617East2_summary.txt',
# '"knuthXGDrive:/Bee Lab/Projects/Bee Forage Mapping/Bee Forage Mapping - Tom Thesis/dataToBeLabelled/2017_6617East2"')
# find_images('/mnt/biology/donaldson/tom/flower_map_new/2017_6917West.csv',
# '/mnt/biology/donaldson/tom/flower_map/data/Week4/6917West',
# '/mnt/biology/donaldson/tom/flower_map_new/useful_images_summaries/2017_6917West_summary.txt',
# '"knuthXGDrive:/Bee Lab/Projects/Bee Forage Mapping/Bee Forage Mapping - Tom Thesis/dataToBeLabelled/2017_6917West"')
def upload_jsons(json_folder, gDrive_path):
    """Print the rclone command that would copy ``json_folder`` to gDrive.

    Nothing is executed; the user is expected to run the printed command.
    Returns None.
    """
    # Same string the original built by concatenation (note the double
    # space before -v, reproduced exactly).
    command = f"rclone copy {json_folder} {gDrive_path}  -v"
    print("prep finished, run the following command to upload all chosen images")
    print(command)
    # process = Popen(['rclone', 'copy', directory_path, gDrive_path, include_from, '-v'], stdout=PIPE, stderr=PIPE)
    # stdout, stderr = process.communicate()
    return
# upload_jsons('/mnt/biology/donaldson/tom/flower_map_new/annotations/jsons_20176217_east',
# '"knuthXGDrive:/Bee Lab/Projects/Bee Forage Mapping/Bee Forage Mapping - Tom Thesis/dataToBeLabelled/2017_6217East_json"')
# find_images('/mnt/biology/donaldson/tom/flower_map_new/2017_6217East.csv',
# '/mnt/biology/donaldson/tom/flower_map/data/Week3/6217East',
# '/mnt/biology/donaldson/tom/flower_map_new/useful_images_summaries/2017_6217East_summary.txt',
# '"knuthXGDrive:/Bee Lab/Projects/Bee Forage Mapping/Bee Forage Mapping - Tom Thesis/dataToBeLabelled/2017_6217East_json"')
# upload_jsons('/mnt/biology/donaldson/tom/flower_map_new/annotations/jsons_20176917_west',
# '"knuthXGDrive:/Bee Lab/Projects/Bee Forage Mapping/Bee Forage Mapping - Tom Thesis/dataToBeLabelled/2017_6917West_json"')
# find_images('/mnt/biology/donaldson/tom/flower_map_new/2017_6917West.csv',
# '/mnt/biology/donaldson/tom/flower_map/data/Week4/6917West',
# '/mnt/biology/donaldson/tom/flower_map_new/useful_images_summaries/2017_6917West_summary.txt',
# '"knuthXGDrive:/Bee Lab/Projects/Bee Forage Mapping/Bee Forage Mapping - Tom Thesis/dataToBeLabelled/2017_6917West_json"')
#### Final Data Uploads
# find_images('/mnt/biology/donaldson/tom/flower_map_new/070921_NorthHasPlants.csv',
# '/mnt/biology/donaldson/tom/flower_map_new/newData/070921_North',
# '/mnt/biology/donaldson/tom/flower_map_new/useful_images_summaries/0709_summary.txt',
# '"knuthXGDrive:/Bee Lab/Projects/Bee Forage Mapping/Bee Forage Mapping - Tom Thesis/labelled data/070921_North"')
# upload_jsons('/mnt/biology/donaldson/tom/flower_map_new/annotations/jsons_20210709_north',
# '"knuthXGDrive:/Bee Lab/Projects/Bee Forage Mapping/Bee Forage Mapping - Tom Thesis/labelled data/070921_North"')
# find_images('/mnt/biology/donaldson/tom/flower_map_new/071121_CentralEasternHasPlants.csv',
# '/mnt/biology/donaldson/tom/flower_map_new/newData/071121_CentralEastern',
# '/mnt/biology/donaldson/tom/flower_map_new/useful_images_summaries/0711_CentralEastern_summary.txt',
# '"knuthXGDrive:/Bee Lab/Projects/Bee Forage Mapping/Bee Forage Mapping - Tom Thesis/labelled data/071121_CentralEastern"')
# upload_jsons('/mnt/biology/donaldson/tom/flower_map_new/annotations/jsons_20210711_centralEastern',
# '"knuthXGDrive:/Bee Lab/Projects/Bee Forage Mapping/Bee Forage Mapping - Tom Thesis/labelled data/071121_CentralEastern"')
# find_images('/mnt/biology/donaldson/tom/flower_map_new/071621_South.csv',
# '/mnt/biology/donaldson/tom/flower_map_new/newData/071621_South',
# '/mnt/biology/donaldson/tom/flower_map_new/useful_images_summaries/071621_South_summary.txt',
# '"knuthXGDrive:/Bee Lab/Projects/Bee Forage Mapping/Bee Forage Mapping - Tom Thesis/labelled data/071621_South"')
# upload_jsons('/mnt/biology/donaldson/tom/flower_map_new/annotations/jsons_20210716_south',
# '"knuthXGDrive:/Bee Lab/Projects/Bee Forage Mapping/Bee Forage Mapping - Tom Thesis/labelled data/071621_South"')
# find_images('/mnt/biology/donaldson/tom/flower_map_new/071121_Western.csv',
# '/mnt/biology/donaldson/tom/flower_map_new/newData/071121_Western',
# '/mnt/biology/donaldson/tom/flower_map_new/useful_images_summaries/071121_Western_summary.txt',
# '"knuthXGDrive:/Bee Lab/Projects/Bee Forage Mapping/Bee Forage Mapping - Tom Thesis/labelled data/071121_Western"')
# upload_jsons('/mnt/biology/donaldson/tom/flower_map_new/annotations/jsons_20210711_western',
# '"knuthXGDrive:/Bee Lab/Projects/Bee Forage Mapping/Bee Forage Mapping - Tom Thesis/labelled data/071121_Western"')
# NOTE(review): these two calls execute at import time — only the
# 2017_6217East batch is active; every other batch above and below is
# commented out. Consider guarding with `if __name__ == "__main__":`
# so importing this module does not trigger the prep/print side effects.
find_images('/mnt/biology/donaldson/tom/flower_map_new/2017_6217East.csv',
'/mnt/biology/donaldson/tom/flower_map/data/Week3/6217East',
'/mnt/biology/donaldson/tom/flower_map_new/useful_images_summaries/2017_6217East_summary.txt',
'"knuthXGDrive:/Bee Lab/Projects/Bee Forage Mapping/Bee Forage Mapping - Tom Thesis/labelled data/2017_6217East"')
upload_jsons('/mnt/biology/donaldson/tom/flower_map_new/annotations/jsons_20176217_east',
'"knuthXGDrive:/Bee Lab/Projects/Bee Forage Mapping/Bee Forage Mapping - Tom Thesis/labelled data/2017_6217East"')
# find_images('/mnt/biology/donaldson/tom/flower_map_new/2017_6617East1.csv',
# '/mnt/biology/donaldson/tom/flower_map/data/Week4/6617East1',
# '/mnt/biology/donaldson/tom/flower_map_new/useful_images_summaries/2017_6617East1_summary.txt',
# '"knuthXGDrive:/Bee Lab/Projects/Bee Forage Mapping/Bee Forage Mapping - Tom Thesis/labelled data/2017_6617East1"')
# upload_jsons('/mnt/biology/donaldson/tom/flower_map_new/annotations/jsons_20176617_east1',
# '"knuthXGDrive:/Bee Lab/Projects/Bee Forage Mapping/Bee Forage Mapping - Tom Thesis/labelled data/2017_6617East1"')
# find_images('/mnt/biology/donaldson/tom/flower_map_new/2017_6617East2.csv',
# '/mnt/biology/donaldson/tom/flower_map/data/Week4/6617East2',
# '/mnt/biology/donaldson/tom/flower_map_new/useful_images_summaries/2017_6617East2_summary.txt',
# '"knuthXGDrive:/Bee Lab/Projects/Bee Forage Mapping/Bee Forage Mapping - Tom Thesis/labelled data/2017_6617East2"')
# upload_jsons('/mnt/biology/donaldson/tom/flower_map_new/annotations/jsons_20176617_east2',
# '"knuthXGDrive:/Bee Lab/Projects/Bee Forage Mapping/Bee Forage Mapping - Tom Thesis/labelled data/2017_6617East2"')
# find_images('/mnt/biology/donaldson/tom/flower_map_new/2017_6917West.csv',
# '/mnt/biology/donaldson/tom/flower_map/data/Week4/6917West',
# '/mnt/biology/donaldson/tom/flower_map_new/useful_images_summaries/2017_6917West_summary.txt',
# '"knuthXGDrive:/Bee Lab/Projects/Bee Forage Mapping/Bee Forage Mapping - Tom Thesis/labelled data/2017_6917West"')
# upload_jsons('/mnt/biology/donaldson/tom/flower_map_new/annotations/jsons_20176917_west',
# '"knuthXGDrive:/Bee Lab/Projects/Bee Forage Mapping/Bee Forage Mapping - Tom Thesis/labelled data/2017_6917West"')
| 61.910053
| 136
| 0.744979
| 1,523
| 11,701
| 5.468155
| 0.089954
| 0.076849
| 0.146013
| 0.169068
| 0.882325
| 0.882325
| 0.882325
| 0.861311
| 0.861311
| 0.856028
| 0
| 0.064777
| 0.131869
| 11,701
| 188
| 137
| 62.239362
| 0.75507
| 0.751047
| 0
| 0.12766
| 0
| 0.042553
| 0.28825
| 0.099397
| 0.021277
| 0
| 0
| 0
| 0
| 1
| 0.042553
| false
| 0
| 0.042553
| 0
| 0.12766
| 0.12766
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b8f0654e66164112136b9fc4adbe892cca2b1b3b
| 85
|
py
|
Python
|
MultipleClassTest.py
|
MinjaMiladinovic/numpy
|
731a08977355babd56a01e4bb67e72693ec4b0e7
|
[
"BSD-3-Clause"
] | null | null | null |
MultipleClassTest.py
|
MinjaMiladinovic/numpy
|
731a08977355babd56a01e4bb67e72693ec4b0e7
|
[
"BSD-3-Clause"
] | 6
|
2020-09-18T10:58:24.000Z
|
2020-09-18T12:08:08.000Z
|
MultipleClassTest.py
|
MinjaMiladinovic/numpy
|
731a08977355babd56a01e4bb67e72693ec4b0e7
|
[
"BSD-3-Clause"
] | null | null | null |
class Foo:
    """Placeholder class with a no-op initializer."""
    def __init__(self):
        """Do nothing."""
        pass
class Bar:
    """Second placeholder class with a no-op initializer."""
    def __init__(self):
        """Do nothing."""
        pass
| 10.625
| 21
| 0.635294
| 12
| 85
| 3.833333
| 0.583333
| 0.304348
| 0.478261
| 0.652174
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.270588
| 85
| 7
| 22
| 12.142857
| 0.741935
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.333333
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
770dc561219238a683ce46eeea70d11830854b3d
| 53,082
|
py
|
Python
|
tests/scheduling/test_condition.py
|
kmantel/graph-scheduler
|
d077b5d8ccf18293b856b5a3c677dda4b802a3b3
|
[
"Apache-2.0"
] | null | null | null |
tests/scheduling/test_condition.py
|
kmantel/graph-scheduler
|
d077b5d8ccf18293b856b5a3c677dda4b802a3b3
|
[
"Apache-2.0"
] | 49
|
2021-07-27T21:52:27.000Z
|
2022-03-30T18:27:04.000Z
|
tests/scheduling/test_condition.py
|
kmantel/graph-scheduler
|
d077b5d8ccf18293b856b5a3c677dda4b802a3b3
|
[
"Apache-2.0"
] | null | null | null |
import logging
import graph_scheduler
import pytest
from psyneulink import _unit_registry
from psyneulink.core.components.functions.nonstateful.transferfunctions import Linear
from psyneulink.core.components.mechanisms.processing.transfermechanism import TransferMechanism
from psyneulink.core.components.projections.pathway.mappingprojection import MappingProjection
from psyneulink.core.compositions.composition import Composition
from psyneulink.core.scheduling.condition import (
AfterCall, AfterNCalls, AfterNCallsCombined, AfterNPasses, AfterNEnvironmentStateUpdates,
AfterPass, AfterEnvironmentStateUpdate, All, AllHaveRun, Always, Any, AtPass, AtConsiderationSetExecution,
AtEnvironmentStateUpdate, AtEnvironmentStateUpdateStart, BeforeNCalls, BeforePass, BeforeConsiderationSetExecution,
BeforeEnvironmentStateUpdate, Condition, ConditionError, EveryNCalls, EveryNPasses, Not,
NWhen, TimeInterval, TimeTermination, WhenFinished, WhenFinishedAll,
WhenFinishedAny, WhileNot,
)
from psyneulink.core.scheduling.scheduler import Scheduler
from psyneulink.core.scheduling.time import TimeScale
logger = logging.getLogger(__name__)
class TestCondition:
    """Unit tests for basic Condition construction and satisfaction."""

    def test_invalid_input_WhenFinished(self):
        with pytest.raises(ConditionError):
            WhenFinished(None).is_satisfied()

    # BUG FIX: the four *Any/*All tests below previously constructed plain
    # WhenFinished, so WhenFinishedAny / WhenFinishedAll (both imported at
    # the top of this file) were never actually exercised. Each test now
    # uses the class it is named after.
    def test_invalid_input_WhenFinishedAny_1(self):
        with pytest.raises(ConditionError):
            WhenFinishedAny(None).is_satisfied()

    def test_invalid_input_WhenFinishedAny_2(self):
        with pytest.raises(ConditionError):
            WhenFinishedAny({None}).is_satisfied()

    def test_invalid_input_WhenFinishedAll_1(self):
        with pytest.raises(ConditionError):
            WhenFinishedAll(None).is_satisfied()

    def test_invalid_input_WhenFinishedAll_2(self):
        with pytest.raises(ConditionError):
            WhenFinishedAll({None}).is_satisfied()

    def test_additional_args(self):
        # Positional args passed to Condition.__init__ are forwarded to the
        # underlying predicate together with call-time args.
        class OneSatisfied(Condition):
            def __init__(self, a):
                def func(a, b):
                    return a or b
                super().__init__(func, a)

        cond = OneSatisfied(True)
        assert cond.is_satisfied(True)
        assert cond.is_satisfied(False)

        cond = OneSatisfied(False)
        assert cond.is_satisfied(True)
        assert not cond.is_satisfied(False)

    def test_additional_kwargs(self):
        # Keyword args stored on the Condition can be overridden at call time;
        # unknown extra kwargs are tolerated.
        class OneSatisfied(Condition):
            def __init__(self, a, c=True):
                def func(a, b, c=True):
                    return a or b or c
                # NOTE(review): passes the literal c=True, ignoring the c
                # parameter — presumably should be c=c; the assertions below
                # do not distinguish the two because call-time kwargs mask
                # the stored value. TODO confirm against Condition semantics.
                super().__init__(func, a, c=True)

        cond = OneSatisfied(True)
        assert cond.is_satisfied(True)
        assert cond.is_satisfied(False, c=True)
        assert cond.is_satisfied(False, c=False)

        cond = OneSatisfied(True, c=False)
        assert cond.is_satisfied(True)
        assert cond.is_satisfied(False, c=True)
        assert cond.is_satisfied(False, c=False)

        cond = OneSatisfied(False)
        assert cond.is_satisfied(True)
        assert cond.is_satisfied(False, c=True)
        assert not cond.is_satisfied(False, c=False)
        assert not cond.is_satisfied(False, c=False, extra_arg=True)
@pytest.mark.psyneulink
class TestGeneric:
    """Tests for generic (lambda-based) WhileNot conditions."""

    def test_WhileNot_AtPass(self):
        composition = Composition()
        A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
        composition.add_node(A)
        scheduler = Scheduler(**pytest.helpers.composition_to_scheduler_args(composition))
        # A is suppressed only while the pass counter is still 0.
        scheduler.add_condition(
            A,
            WhileNot(
                lambda sched: sched.get_clock(sched.default_execution_id).get_total_times_relative(TimeScale.PASS, TimeScale.ENVIRONMENT_STATE_UPDATE) == 0,
                scheduler,
            ),
        )
        termination_conds = {
            TimeScale.ENVIRONMENT_SEQUENCE: AfterNEnvironmentStateUpdates(1),
            TimeScale.ENVIRONMENT_STATE_UPDATE: AtPass(5),
        }
        results = list(scheduler.run(termination_conds=termination_conds))
        expected = [set(), A, A, A, A]
        assert results == pytest.helpers.setify_expected_output(expected)

    def test_WhileNot_AtPass_in_middle(self):
        composition = Composition()
        A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
        composition.add_node(A)
        scheduler = Scheduler(**pytest.helpers.composition_to_scheduler_args(composition))
        # A is suppressed only on the third pass (pass counter == 2).
        scheduler.add_condition(
            A,
            WhileNot(
                lambda sched: sched.get_clock(sched.default_execution_id).get_total_times_relative(TimeScale.PASS, TimeScale.ENVIRONMENT_STATE_UPDATE) == 2,
                scheduler,
            ),
        )
        termination_conds = {
            TimeScale.ENVIRONMENT_SEQUENCE: AfterNEnvironmentStateUpdates(1),
            TimeScale.ENVIRONMENT_STATE_UPDATE: AtPass(5),
        }
        results = list(scheduler.run(termination_conds=termination_conds))
        expected = [A, A, set(), A, A]
        assert results == pytest.helpers.setify_expected_output(expected)
@pytest.mark.psyneulink
class TestRelative:
    """Tests for composite/relative conditions (Any, All, Not, NWhen)."""

    def test_Any_end_before_one_finished(self):
        composition = Composition()
        A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
        for node in [A]:
            composition.add_node(node)
        scheduler = Scheduler(**pytest.helpers.composition_to_scheduler_args(composition))
        scheduler.add_condition(A, EveryNPasses(1))
        # Any() terminates at the earlier of the two sub-conditions.
        termination_conds = {
            TimeScale.ENVIRONMENT_SEQUENCE: AfterNEnvironmentStateUpdates(1),
            TimeScale.ENVIRONMENT_STATE_UPDATE: Any(AfterNCalls(A, 10), AtPass(5)),
        }
        results = list(scheduler.run(termination_conds=termination_conds))
        expected = [A for _ in range(5)]
        assert results == pytest.helpers.setify_expected_output(expected)

    def test_All_end_after_one_finished(self):
        composition = Composition()
        A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
        for node in [A]:
            composition.add_node(node)
        scheduler = Scheduler(**pytest.helpers.composition_to_scheduler_args(composition))
        scheduler.add_condition(A, EveryNPasses(1))
        termination_conds = {
            TimeScale.ENVIRONMENT_SEQUENCE: AfterNEnvironmentStateUpdates(1),
            TimeScale.ENVIRONMENT_STATE_UPDATE: Any(AfterNCalls(A, 5), AtPass(10)),
        }
        results = list(scheduler.run(termination_conds=termination_conds))
        expected = [A for _ in range(5)]
        assert results == pytest.helpers.setify_expected_output(expected)

    def test_Not_AtPass(self):
        composition = Composition()
        A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
        composition.add_node(A)
        scheduler = Scheduler(**pytest.helpers.composition_to_scheduler_args(composition))
        # A runs on every pass except pass 0.
        scheduler.add_condition(A, Not(AtPass(0)))
        termination_conds = {
            TimeScale.ENVIRONMENT_SEQUENCE: AfterNEnvironmentStateUpdates(1),
            TimeScale.ENVIRONMENT_STATE_UPDATE: AtPass(5),
        }
        results = list(scheduler.run(termination_conds=termination_conds))
        expected = [set(), A, A, A, A]
        assert results == pytest.helpers.setify_expected_output(expected)

    def test_Not_AtPass_in_middle(self):
        composition = Composition()
        A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
        composition.add_node(A)
        scheduler = Scheduler(**pytest.helpers.composition_to_scheduler_args(composition))
        # A is skipped only on pass 2.
        scheduler.add_condition(A, Not(AtPass(2)))
        termination_conds = {
            TimeScale.ENVIRONMENT_SEQUENCE: AfterNEnvironmentStateUpdates(1),
            TimeScale.ENVIRONMENT_STATE_UPDATE: AtPass(5),
        }
        results = list(scheduler.run(termination_conds=termination_conds))
        expected = [A, A, set(), A, A]
        assert results == pytest.helpers.setify_expected_output(expected)

    @pytest.mark.parametrize(
        'n,expected_output', [
            (0, ['A', 'A', 'A', 'A', 'A', 'A']),
            (1, ['A', 'A', 'A', 'B', 'A', 'A', 'A']),
            (2, ['A', 'A', 'A', 'B', 'A', 'B', 'A', 'A']),
        ]
    )
    def test_NWhen_AfterNCalls(self, n, expected_output):
        composition = Composition()
        A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
        B = TransferMechanism(function=Linear(intercept=4.0), name='B')
        for node in [A, B]:
            composition.add_node(node)
        composition.add_projection(MappingProjection(), A, B)
        scheduler = Scheduler(**pytest.helpers.composition_to_scheduler_args(composition))
        scheduler.add_condition(A, Always())
        # B fires at most n times once A has run 3 times.
        scheduler.add_condition(B, NWhen(AfterNCalls(A, 3), n))
        termination_conds = {
            TimeScale.ENVIRONMENT_SEQUENCE: AfterNEnvironmentStateUpdates(1),
            TimeScale.ENVIRONMENT_STATE_UPDATE: AfterNCalls(A, 6),
        }
        results = list(scheduler.run(termination_conds=termination_conds))
        expected = [A if label == 'A' else B for label in expected_output]
        assert results == pytest.helpers.setify_expected_output(expected)
@pytest.mark.psyneulink
class TestTimePNL:
def test_BeforeConsiderationSetExecution(self):
comp = Composition()
A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
comp.add_node(A)
sched = Scheduler(**pytest.helpers.composition_to_scheduler_args(comp))
sched.add_condition(A, BeforeConsiderationSetExecution(2))
termination_conds = {}
termination_conds[TimeScale.ENVIRONMENT_SEQUENCE] = AfterNEnvironmentStateUpdates(1)
termination_conds[TimeScale.ENVIRONMENT_STATE_UPDATE] = AtPass(5)
output = list(sched.run(termination_conds=termination_conds))
expected_output = [A, A, set(), set(), set()]
assert output == pytest.helpers.setify_expected_output(expected_output)
def test_BeforeConsiderationSetExecution_2(self):
comp = Composition()
A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
B = TransferMechanism(name='B')
comp.add_node(A)
comp.add_node(B)
comp.add_projection(MappingProjection(), A, B)
sched = Scheduler(**pytest.helpers.composition_to_scheduler_args(comp))
sched.add_condition(A, BeforeConsiderationSetExecution(2))
sched.add_condition(B, Always())
termination_conds = {}
termination_conds[TimeScale.ENVIRONMENT_SEQUENCE] = AfterNEnvironmentStateUpdates(1)
termination_conds[TimeScale.ENVIRONMENT_STATE_UPDATE] = AtPass(5)
output = list(sched.run(termination_conds=termination_conds))
expected_output = [A, B, B, B, B, B]
assert output == pytest.helpers.setify_expected_output(expected_output)
def test_AtConsiderationSetExecution(self):
comp = Composition()
A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
comp.add_node(A)
sched = Scheduler(**pytest.helpers.composition_to_scheduler_args(comp))
sched.add_condition(A, AtConsiderationSetExecution(0))
termination_conds = {}
termination_conds[TimeScale.ENVIRONMENT_SEQUENCE] = AfterNEnvironmentStateUpdates(1)
termination_conds[TimeScale.ENVIRONMENT_STATE_UPDATE] = AtPass(5)
output = list(sched.run(termination_conds=termination_conds))
expected_output = [A, set(), set(), set(), set()]
assert output == pytest.helpers.setify_expected_output(expected_output)
def test_BeforePass(self):
comp = Composition()
A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
comp.add_node(A)
sched = Scheduler(**pytest.helpers.composition_to_scheduler_args(comp))
sched.add_condition(A, BeforePass(2))
termination_conds = {}
termination_conds[TimeScale.ENVIRONMENT_SEQUENCE] = AfterNEnvironmentStateUpdates(1)
termination_conds[TimeScale.ENVIRONMENT_STATE_UPDATE] = AtPass(5)
output = list(sched.run(termination_conds=termination_conds))
expected_output = [A, A, set(), set(), set()]
assert output == pytest.helpers.setify_expected_output(expected_output)
def test_AtPass(self):
comp = Composition()
A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
comp.add_node(A)
sched = Scheduler(**pytest.helpers.composition_to_scheduler_args(comp))
sched.add_condition(A, AtPass(0))
termination_conds = {}
termination_conds[TimeScale.ENVIRONMENT_SEQUENCE] = AfterNEnvironmentStateUpdates(1)
termination_conds[TimeScale.ENVIRONMENT_STATE_UPDATE] = AtPass(5)
output = list(sched.run(termination_conds=termination_conds))
expected_output = [A, set(), set(), set(), set()]
assert output == pytest.helpers.setify_expected_output(expected_output)
def test_AtPass_underconstrained(self):
comp = Composition()
A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
B = TransferMechanism(function=Linear(intercept=4.0), name='B')
C = TransferMechanism(function=Linear(intercept=1.5), name='C')
for m in [A, B, C]:
comp.add_node(m)
comp.add_projection(MappingProjection(), A, B)
comp.add_projection(MappingProjection(), B, C)
sched = Scheduler(**pytest.helpers.composition_to_scheduler_args(comp))
sched.add_condition(A, AtPass(0))
sched.add_condition(B, Always())
sched.add_condition(C, Always())
termination_conds = {}
termination_conds[TimeScale.ENVIRONMENT_SEQUENCE] = AfterNEnvironmentStateUpdates(1)
termination_conds[TimeScale.ENVIRONMENT_STATE_UPDATE] = AfterNCalls(C, 2)
output = list(sched.run(termination_conds=termination_conds))
expected_output = [A, B, C, B, C]
assert output == pytest.helpers.setify_expected_output(expected_output)
def test_AtPass_in_middle(self):
comp = Composition()
A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
comp.add_node(A)
sched = Scheduler(**pytest.helpers.composition_to_scheduler_args(comp))
sched.add_condition(A, AtPass(2))
termination_conds = {}
termination_conds[TimeScale.ENVIRONMENT_SEQUENCE] = AfterNEnvironmentStateUpdates(1)
termination_conds[TimeScale.ENVIRONMENT_STATE_UPDATE] = AtPass(5)
output = list(sched.run(termination_conds=termination_conds))
expected_output = [set(), set(), A, set(), set()]
assert output == pytest.helpers.setify_expected_output(expected_output)
def test_AtPass_at_end(self):
comp = Composition()
A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
comp.add_node(A)
sched = Scheduler(**pytest.helpers.composition_to_scheduler_args(comp))
sched.add_condition(A, AtPass(5))
termination_conds = {}
termination_conds[TimeScale.ENVIRONMENT_SEQUENCE] = AfterNEnvironmentStateUpdates(1)
termination_conds[TimeScale.ENVIRONMENT_STATE_UPDATE] = AtPass(5)
output = list(sched.run(termination_conds=termination_conds))
expected_output = [set(), set(), set(), set(), set()]
assert output == pytest.helpers.setify_expected_output(expected_output)
def test_AtPass_after_end(self):
comp = Composition()
A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
comp.add_node(A)
sched = Scheduler(**pytest.helpers.composition_to_scheduler_args(comp))
sched.add_condition(A, AtPass(6))
termination_conds = {}
termination_conds[TimeScale.ENVIRONMENT_SEQUENCE] = AfterNEnvironmentStateUpdates(1)
termination_conds[TimeScale.ENVIRONMENT_STATE_UPDATE] = AtPass(5)
output = list(sched.run(termination_conds=termination_conds))
expected_output = [set(), set(), set(), set(), set()]
assert output == pytest.helpers.setify_expected_output(expected_output)
def test_AfterPass(self):
comp = Composition()
A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
comp.add_node(A)
sched = Scheduler(**pytest.helpers.composition_to_scheduler_args(comp))
sched.add_condition(A, AfterPass(0))
termination_conds = {}
termination_conds[TimeScale.ENVIRONMENT_SEQUENCE] = AfterNEnvironmentStateUpdates(1)
termination_conds[TimeScale.ENVIRONMENT_STATE_UPDATE] = AtPass(5)
output = list(sched.run(termination_conds=termination_conds))
expected_output = [set(), A, A, A, A]
assert output == pytest.helpers.setify_expected_output(expected_output)
def test_AfterNPasses(self):
    """AfterNPasses(1): A starts running once a full pass has elapsed."""
    comp = Composition()
    mech = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
    comp.add_node(mech)
    sched = Scheduler(**pytest.helpers.composition_to_scheduler_args(comp))
    sched.add_condition(mech, AfterNPasses(1))
    termination_conds = {
        TimeScale.ENVIRONMENT_SEQUENCE: AfterNEnvironmentStateUpdates(1),
        TimeScale.ENVIRONMENT_STATE_UPDATE: AtPass(5),
    }
    output = list(sched.run(termination_conds=termination_conds))
    expected_output = [set(), mech, mech, mech, mech]
    assert output == pytest.helpers.setify_expected_output(expected_output)
def test_BeforeEnvironmentStateUpdate(self):
    """BeforeEnvironmentStateUpdate(4): A runs in updates 0-3 and is idle in update 4."""
    comp = Composition()
    mech = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
    comp.add_node(mech)
    sched = Scheduler(**pytest.helpers.composition_to_scheduler_args(comp))
    sched.add_condition(mech, BeforeEnvironmentStateUpdate(4))
    termination_conds = {
        TimeScale.ENVIRONMENT_SEQUENCE: AfterNEnvironmentStateUpdates(5),
        TimeScale.ENVIRONMENT_STATE_UPDATE: AtPass(1),
    }
    comp.run(
        inputs={mech: range(6)},
        scheduler=sched,
        termination_processing=termination_conds,
    )
    output = sched.execution_list[comp.default_execution_id]
    expected_output = [mech, mech, mech, mech, set()]
    assert output == pytest.helpers.setify_expected_output(expected_output)
def test_AtEnvironmentStateUpdate(self):
    """AtEnvironmentStateUpdate(4) as sequence termination yields exactly four runs of A."""
    comp = Composition()
    mech = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
    comp.add_node(mech)
    sched = Scheduler(**pytest.helpers.composition_to_scheduler_args(comp))
    sched.add_condition(mech, Always())
    termination_conds = {
        TimeScale.ENVIRONMENT_SEQUENCE: AtEnvironmentStateUpdate(4),
        TimeScale.ENVIRONMENT_STATE_UPDATE: AtPass(1),
    }
    comp.run(
        inputs={mech: range(6)},
        scheduler=sched,
        termination_processing=termination_conds,
    )
    output = sched.execution_list[comp.default_execution_id]
    expected_output = [mech, mech, mech, mech]
    assert output == pytest.helpers.setify_expected_output(expected_output)
def test_AfterEnvironmentStateUpdate(self):
    """AfterEnvironmentStateUpdate(4) as sequence termination yields five runs of A."""
    comp = Composition()
    mech = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
    comp.add_node(mech)
    sched = Scheduler(**pytest.helpers.composition_to_scheduler_args(comp))
    sched.add_condition(mech, Always())
    termination_conds = {
        TimeScale.ENVIRONMENT_SEQUENCE: AfterEnvironmentStateUpdate(4),
        TimeScale.ENVIRONMENT_STATE_UPDATE: AtPass(1),
    }
    comp.run(
        inputs={mech: range(6)},
        scheduler=sched,
        termination_processing=termination_conds,
    )
    output = sched.execution_list[comp.default_execution_id]
    expected_output = [mech, mech, mech, mech, mech]
    assert output == pytest.helpers.setify_expected_output(expected_output)
def test_AfterNEnvironmentStateUpdates(self):
    """Single state update limited to 5 passes; AfterNPasses(1) skips only pass 0."""
    comp = Composition()
    mech = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
    comp.add_node(mech)
    sched = Scheduler(**pytest.helpers.composition_to_scheduler_args(comp))
    sched.add_condition(mech, AfterNPasses(1))
    termination_conds = {
        TimeScale.ENVIRONMENT_SEQUENCE: AfterNEnvironmentStateUpdates(1),
        TimeScale.ENVIRONMENT_STATE_UPDATE: AtPass(5),
    }
    output = list(sched.run(termination_conds=termination_conds))
    expected_output = [set(), mech, mech, mech, mech]
    assert output == pytest.helpers.setify_expected_output(expected_output)
class TestTime:
    """Tests of consideration-set- and sequence-level time conditions.

    Uses the standalone ``graph_scheduler`` API directly (plain string node
    names instead of mechanisms), so expected outputs are string labels.
    """

    @pytest.mark.parametrize(
        'node_condition, termination_conditions, expected_output',
        [
            pytest.param(
                graph_scheduler.AfterNPasses(1),
                {
                    TimeScale.ENVIRONMENT_SEQUENCE: graph_scheduler.AfterNEnvironmentStateUpdates(1),
                    TimeScale.ENVIRONMENT_STATE_UPDATE: graph_scheduler.AfterConsiderationSetExecution(4)
                },
                [set(), 'A', 'A', 'A', 'A'],
                id='AfterConsiderationSetExecution'
            ),
            pytest.param(
                graph_scheduler.AfterNPasses(1),
                {
                    TimeScale.ENVIRONMENT_SEQUENCE: graph_scheduler.AfterNEnvironmentStateUpdates(1),
                    TimeScale.ENVIRONMENT_STATE_UPDATE: graph_scheduler.AfterNConsiderationSetExecutions(5)
                },
                [set(), 'A', 'A', 'A', 'A'],
                id='AfterNConsiderationSetExecutions'
            ),
        ]
    )
    def test_single_node(
        self, node_condition, termination_conditions, expected_output
    ):
        """Run a one-node graph and compare the full execution trace."""
        graph = {'A': set()}
        sched = graph_scheduler.Scheduler(graph)
        sched.add_condition('A', node_condition)
        output = list(sched.run(termination_conds=termination_conditions))
        assert output == pytest.helpers.setify_expected_output(expected_output)

    @pytest.mark.parametrize(
        'node_condition, termination_conditions, expected_output, n_sequences, n_state_updates_per_sequence',
        [
            pytest.param(
                graph_scheduler.AtEnvironmentStateUpdateNStart(2),
                {TimeScale.ENVIRONMENT_STATE_UPDATE: graph_scheduler.AfterNPasses(1)},
                [[set(), set(), 'A', set()]],
                1,
                4,
                id='AtEnvironmentStateUpdateNStart'
            ),
            pytest.param(
                graph_scheduler.AtEnvironmentSequence(4),
                {TimeScale.ENVIRONMENT_STATE_UPDATE: graph_scheduler.AfterNPasses(1)},
                [[set()], [set()], [set()], [set()], ['A'], [set()]],
                6,
                1,
                id='AtEnvironmentSequence'
            ),
            pytest.param(
                graph_scheduler.AfterEnvironmentSequence(3),
                {TimeScale.ENVIRONMENT_STATE_UPDATE: graph_scheduler.AfterNPasses(1)},
                [[set()], [set()], [set()], [set()], ['A'], ['A']],
                6,
                1,
                id='AfterEnvironmentSequence'
            ),
            pytest.param(
                graph_scheduler.AfterNEnvironmentSequences(4),
                {TimeScale.ENVIRONMENT_STATE_UPDATE: graph_scheduler.AfterNPasses(1)},
                [[set()], [set()], [set()], [set()], ['A'], ['A']],
                6,
                1,
                id='AfterNEnvironmentSequences'
            ),
            pytest.param(
                graph_scheduler.AtEnvironmentSequenceStart(),
                {TimeScale.ENVIRONMENT_STATE_UPDATE: graph_scheduler.AfterNPasses(1)},
                [['A', set()], ['A', set()]],
                2,
                2,
                id='AtEnvironmentSequenceStart'
            ),
            pytest.param(
                graph_scheduler.AtEnvironmentSequenceNStart(1),
                {TimeScale.ENVIRONMENT_STATE_UPDATE: graph_scheduler.AfterNPasses(1)},
                [[set(), set()], ['A', set()], [set(), set()]],
                3,
                2,
                id='AtEnvironmentSequenceNStart'
            ),
        ]
    )
    def test_single_node_n_sequences(
        self,
        node_condition,
        termination_conditions,
        expected_output,
        n_sequences,
        n_state_updates_per_sequence
    ):
        """Run several environment sequences and compare each sequence's trace.

        ``expected_output`` is a list of per-sequence traces; the scheduler is
        advanced to the next sequence with ``end_environment_sequence()``.
        """
        graph = {'A': set()}
        sched = graph_scheduler.Scheduler(graph)
        sched.add_condition('A', node_condition)
        output = []
        for _ in range(n_sequences):
            su = []
            for i in range(n_state_updates_per_sequence):
                su.extend(
                    list(sched.run(termination_conds=termination_conditions))
                )
            output.append(su)
            # advance the scheduler's clock to the next environment sequence
            sched.end_environment_sequence()
        for i in range(n_sequences):
            assert output[i] == pytest.helpers.setify_expected_output(expected_output[i]), f'ENVIRONMENT_SEQUENCE {i}'
@pytest.mark.psyneulink
class TestComponentBased:
    """Tests of conditions keyed to how many times a specific node has run."""

    def test_BeforeNCalls(self):
        """A runs until it has been called 3 times, then stays idle."""
        comp = Composition()
        A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
        comp.add_node(A)
        sched = Scheduler(**pytest.helpers.composition_to_scheduler_args(comp))
        sched.add_condition(A, BeforeNCalls(A, 3))
        termination_conds = {}
        termination_conds[TimeScale.ENVIRONMENT_SEQUENCE] = AfterNEnvironmentStateUpdates(1)
        termination_conds[TimeScale.ENVIRONMENT_STATE_UPDATE] = AtPass(5)
        output = list(sched.run(termination_conds=termination_conds))
        expected_output = [A, A, A, set(), set()]
        assert output == pytest.helpers.setify_expected_output(expected_output)

    # NOTE:
    # The behavior is not desired (i.e. depending on the order mechanisms are checked, B running AtCall(A, x))
    # may run on both the xth and x+1st call of A; if A and B are not parent-child
    # A fix could invalidate key assumptions and affect many other conditions
    # Since this condition is unlikely to be used, it's best to leave it for now
    # def test_AtCall(self):
    #     comp = Composition()
    #     A = TransferMechanism(function = Linear(slope=5.0, intercept = 2.0), name = 'A')
    #     B = TransferMechanism(function = Linear(intercept = 4.0), name = 'B')
    #     C = TransferMechanism(function = Linear(intercept = 1.5), name = 'C')
    #     for m in [A,B]:
    #         comp.add_node(m)
    #     sched = Scheduler(**pytest.helpers.composition_to_scheduler_args(comp))
    #     sched.add_condition(A, Always())
    #     sched.add_condition(B, AtCall(A, 3))
    #     termination_conds = {}
    #     termination_conds[TimeScale.ENVIRONMENT_SEQUENCE] = AfterNEnvironmentStateUpdates(1)
    #     termination_conds[TimeScale.ENVIRONMENT_STATE_UPDATE] = AtPass(5)
    #     output = list(sched.run(termination_conds=termination_conds))
    #     expected_output = [A, A, set([A, B]), A, A]
    #     assert output == pytest.helpers.setify_expected_output(expected_output)

    def test_AfterCall(self):
        """B joins A only after A has been called 3 times."""
        comp = Composition()
        A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
        B = TransferMechanism(function=Linear(intercept=4.0), name='B')
        for m in [A, B]:
            comp.add_node(m)
        sched = Scheduler(**pytest.helpers.composition_to_scheduler_args(comp))
        sched.add_condition(B, AfterCall(A, 3))
        termination_conds = {}
        termination_conds[TimeScale.ENVIRONMENT_SEQUENCE] = AfterNEnvironmentStateUpdates(1)
        termination_conds[TimeScale.ENVIRONMENT_STATE_UPDATE] = AtPass(5)
        output = list(sched.run(termination_conds=termination_conds))
        expected_output = [A, A, A, set([A, B]), set([A, B])]
        assert output == pytest.helpers.setify_expected_output(expected_output)

    def test_AfterNCalls(self):
        """B joins A from A's 3rd call onward (AfterNCalls is inclusive of the nth call)."""
        comp = Composition()
        A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
        B = TransferMechanism(function=Linear(intercept=4.0), name='B')
        for m in [A, B]:
            comp.add_node(m)
        sched = Scheduler(**pytest.helpers.composition_to_scheduler_args(comp))
        sched.add_condition(A, Always())
        sched.add_condition(B, AfterNCalls(A, 3))
        termination_conds = {}
        termination_conds[TimeScale.ENVIRONMENT_SEQUENCE] = AfterNEnvironmentStateUpdates(1)
        termination_conds[TimeScale.ENVIRONMENT_STATE_UPDATE] = AtPass(5)
        output = list(sched.run(termination_conds=termination_conds))
        expected_output = [A, A, set([A, B]), set([A, B]), set([A, B])]
        assert output == pytest.helpers.setify_expected_output(expected_output)
@pytest.mark.psyneulink
class TestConvenience:
    """Tests for convenience condition aliases."""

    def test_AtEnvironmentStateUpdateStart(self):
        """B runs only on the very first pass of the state update; A runs every pass."""
        comp = Composition()
        node_a = TransferMechanism(name='A')
        node_b = TransferMechanism(name='B')
        comp.add_linear_processing_pathway([node_a, node_b])
        sched = Scheduler(**pytest.helpers.composition_to_scheduler_args(comp))
        sched.add_condition(node_b, AtEnvironmentStateUpdateStart())
        termination_conds = {TimeScale.ENVIRONMENT_STATE_UPDATE: AtPass(3)}
        output = list(sched.run(termination_conds=termination_conds))
        expected_output = [node_a, node_b, node_a, node_a]
        assert output == pytest.helpers.setify_expected_output(expected_output)
@pytest.mark.psyneulink
def test_composite_condition_multi(self):
    """C fires only once both nested Any() clauses of the All() condition hold."""
    comp = Composition()
    mech_a = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
    mech_b = TransferMechanism(function=Linear(intercept=4.0), name='B')
    mech_c = TransferMechanism(function=Linear(intercept=1.5), name='C')
    for node in (mech_a, mech_b, mech_c):
        comp.add_node(node)
    comp.add_projection(MappingProjection(), mech_a, mech_b)
    comp.add_projection(MappingProjection(), mech_b, mech_c)
    sched = Scheduler(**pytest.helpers.composition_to_scheduler_args(comp))
    sched.add_condition(mech_a, EveryNPasses(1))
    sched.add_condition(mech_b, EveryNCalls(mech_a, 2))
    sched.add_condition(
        mech_c,
        All(
            Any(AfterPass(6), AfterNCalls(mech_b, 2)),
            Any(AfterPass(2), AfterNCalls(mech_b, 3)),
        ),
    )
    termination_conds = {
        TimeScale.ENVIRONMENT_SEQUENCE: AfterNEnvironmentStateUpdates(1),
        TimeScale.ENVIRONMENT_STATE_UPDATE: AfterNCalls(mech_c, 3),
    }
    output = list(sched.run(termination_conds=termination_conds))
    expected_output = [
        mech_a, mech_a, mech_b, mech_a, mech_a, mech_b, mech_c,
        mech_a, mech_c, mech_a, mech_b, mech_c,
    ]
    assert output == pytest.helpers.setify_expected_output(expected_output)
@pytest.mark.psyneulink
def test_AfterNCallsCombined(self):
    """Stop once B and C have run 4 times combined (AfterNCallsCombined)."""
    comp = Composition()
    mech_a = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
    mech_a.is_finished_flag = True
    mech_b = TransferMechanism(function=Linear(intercept=4.0), name='B')
    mech_b.is_finished_flag = True
    mech_c = TransferMechanism(function=Linear(intercept=1.5), name='C')
    for node in (mech_a, mech_b, mech_c):
        comp.add_node(node)
    comp.add_projection(MappingProjection(), mech_a, mech_b)
    comp.add_projection(MappingProjection(), mech_b, mech_c)
    sched = Scheduler(**pytest.helpers.composition_to_scheduler_args(comp))
    sched.add_condition(mech_a, EveryNPasses(1))
    sched.add_condition(mech_b, EveryNCalls(mech_a, 2))
    sched.add_condition(mech_c, EveryNCalls(mech_b, 2))
    termination_conds = {
        TimeScale.ENVIRONMENT_SEQUENCE: AfterNEnvironmentStateUpdates(1),
        TimeScale.ENVIRONMENT_STATE_UPDATE: AfterNCallsCombined(mech_b, mech_c, n=4),
    }
    output = list(sched.run(termination_conds=termination_conds))
    expected_output = [
        mech_a, mech_a, mech_b, mech_a, mech_a, mech_b, mech_c, mech_a, mech_a, mech_b,
    ]
    assert output == pytest.helpers.setify_expected_output(expected_output)
@pytest.mark.psyneulink
def test_AllHaveRun(self):
    """Stop as soon as every listed node (A, B, C) has executed at least once."""
    comp = Composition()
    mech_a = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
    mech_a.is_finished_flag = False
    mech_b = TransferMechanism(function=Linear(intercept=4.0), name='B')
    mech_b.is_finished_flag = True
    mech_c = TransferMechanism(function=Linear(intercept=1.5), name='C')
    for node in (mech_a, mech_b, mech_c):
        comp.add_node(node)
    comp.add_projection(MappingProjection(), mech_a, mech_b)
    comp.add_projection(MappingProjection(), mech_b, mech_c)
    sched = Scheduler(**pytest.helpers.composition_to_scheduler_args(comp))
    sched.add_condition(mech_a, EveryNPasses(1))
    sched.add_condition(mech_b, EveryNCalls(mech_a, 2))
    sched.add_condition(mech_c, EveryNCalls(mech_b, 2))
    termination_conds = {
        TimeScale.ENVIRONMENT_SEQUENCE: AfterNEnvironmentStateUpdates(1),
        TimeScale.ENVIRONMENT_STATE_UPDATE: AllHaveRun(mech_a, mech_b, mech_c),
    }
    output = list(sched.run(termination_conds=termination_conds))
    expected_output = [
        mech_a, mech_a, mech_b, mech_a, mech_a, mech_b, mech_c,
    ]
    assert output == pytest.helpers.setify_expected_output(expected_output)
@pytest.mark.psyneulink
def test_AllHaveRun_2(self):
    """AllHaveRun() with no arguments defaults to every node in the scheduler."""
    comp = Composition()
    mech_a = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
    mech_b = TransferMechanism(function=Linear(intercept=4.0), name='B')
    mech_c = TransferMechanism(function=Linear(intercept=1.5), name='C')
    for node in (mech_a, mech_b, mech_c):
        comp.add_node(node)
    comp.add_projection(MappingProjection(), mech_a, mech_b)
    comp.add_projection(MappingProjection(), mech_b, mech_c)
    sched = Scheduler(**pytest.helpers.composition_to_scheduler_args(comp))
    sched.add_condition(mech_a, EveryNPasses(1))
    sched.add_condition(mech_b, EveryNCalls(mech_a, 2))
    sched.add_condition(mech_c, EveryNCalls(mech_b, 2))
    termination_conds = {
        TimeScale.ENVIRONMENT_SEQUENCE: AfterNEnvironmentStateUpdates(1),
        TimeScale.ENVIRONMENT_STATE_UPDATE: AllHaveRun(),
    }
    output = list(sched.run(termination_conds=termination_conds))
    expected_output = [
        mech_a, mech_a, mech_b, mech_a, mech_a, mech_b, mech_c,
    ]
    assert output == pytest.helpers.setify_expected_output(expected_output)
@pytest.mark.psyneulink
class TestWhenFinished:
    """Tests of the ``WhenFinishedAny`` / ``WhenFinishedAll`` conditions.

    ``TransferMechanism`` is patched for the duration of this class so that
    ``is_finished`` simply reports a settable ``is_finished_flag`` attribute
    that the tests toggle directly.
    """

    @classmethod
    def setup_class(self):
        # Save the originals so teardown_class can restore them, then patch
        # is_finished to reflect the per-instance flag.
        # NOTE(review): assumes TransferMechanism already defines both
        # attributes before patching — confirm against the class definition.
        self.orig_is_finished_flag = TransferMechanism.is_finished_flag
        self.orig_is_finished = TransferMechanism.is_finished
        TransferMechanism.is_finished_flag = True
        TransferMechanism.is_finished = lambda self, context: self.is_finished_flag

    @classmethod
    def teardown_class(self):
        # Remove the patched class attributes, then restore the saved
        # originals (the del is immediately superseded by the reassignment,
        # but is kept as written).
        del TransferMechanism.is_finished_flag
        del TransferMechanism.is_finished
        TransferMechanism.is_finished_flag = self.orig_is_finished_flag
        TransferMechanism.is_finished = self.orig_is_finished

    def test_WhenFinishedAny_1(self):
        """C waits for either A or B to be finished; both start finished, so C runs right away."""
        comp = Composition()
        A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
        A.is_finished_flag = True
        B = TransferMechanism(function=Linear(intercept=4.0), name='B')
        B.is_finished_flag = True
        C = TransferMechanism(function=Linear(intercept=1.5), name='C')
        for m in [A, B, C]:
            comp.add_node(m)
        comp.add_projection(MappingProjection(), A, C)
        comp.add_projection(MappingProjection(), B, C)
        sched = Scheduler(**pytest.helpers.composition_to_scheduler_args(comp))
        sched.add_condition(A, EveryNPasses(1))
        sched.add_condition(B, EveryNPasses(1))
        sched.add_condition(C, WhenFinishedAny(A, B))
        termination_conds = {}
        termination_conds[TimeScale.ENVIRONMENT_SEQUENCE] = AfterNEnvironmentStateUpdates(1)
        termination_conds[TimeScale.ENVIRONMENT_STATE_UPDATE] = AfterNCalls(C, 1)
        output = list(sched.run(termination_conds=termination_conds))
        expected_output = [
            set([A, B]), C
        ]
        assert output == pytest.helpers.setify_expected_output(expected_output)

    def test_WhenFinishedAny_2(self):
        """Only B is finished, which is enough for WhenFinishedAny; run until A's 5th call."""
        comp = Composition()
        A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
        A.is_finished_flag = False
        B = TransferMechanism(function=Linear(intercept=4.0), name='B')
        B.is_finished_flag = True
        C = TransferMechanism(function=Linear(intercept=1.5), name='C')
        for m in [A, B, C]:
            comp.add_node(m)
        comp.add_projection(MappingProjection(), A, C)
        comp.add_projection(MappingProjection(), B, C)
        sched = Scheduler(**pytest.helpers.composition_to_scheduler_args(comp))
        sched.add_condition(A, EveryNPasses(1))
        sched.add_condition(B, EveryNPasses(1))
        sched.add_condition(C, WhenFinishedAny(A, B))
        termination_conds = {}
        termination_conds[TimeScale.ENVIRONMENT_SEQUENCE] = AfterNEnvironmentStateUpdates(1)
        termination_conds[TimeScale.ENVIRONMENT_STATE_UPDATE] = AfterNCalls(A, 5)
        output = list(sched.run(termination_conds=termination_conds))
        expected_output = [
            set([A, B]), C, set([A, B]), C, set([A, B]), C, set([A, B]), C, set([A, B])
        ]
        assert output == pytest.helpers.setify_expected_output(expected_output)

    def test_WhenFinishedAny_noargs(self):
        """WhenFinishedAny() with no args: flags are flipped mid-run to trigger termination."""
        comp = Composition()
        A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
        B = TransferMechanism(function=Linear(intercept=4.0), name='B')
        C = TransferMechanism(function=Linear(intercept=1.5), name='C')
        for m in [A, B, C]:
            m.is_finished_flag = False
            comp.add_node(m)
        comp.add_projection(MappingProjection(), A, C)
        comp.add_projection(MappingProjection(), B, C)
        sched = Scheduler(**pytest.helpers.composition_to_scheduler_args(comp))
        sched.add_condition(A, Always())
        sched.add_condition(B, Always())
        sched.add_condition(C, Always())
        termination_conds = {}
        termination_conds[TimeScale.ENVIRONMENT_SEQUENCE] = AfterNEnvironmentStateUpdates(1)
        termination_conds[TimeScale.ENVIRONMENT_STATE_UPDATE] = WhenFinishedAny()
        output = []
        i = 0
        for step in sched.run(termination_conds=termination_conds):
            # flip flags partway through so the no-arg condition becomes true
            if i == 3:
                A.is_finished_flag = True
                B.is_finished_flag = True
            if i == 4:
                C.is_finished_flag = True
            output.append(step)
            i += 1
        expected_output = [
            set([A, B]), C, set([A, B]), C,
        ]
        assert output == pytest.helpers.setify_expected_output(expected_output)

    def test_WhenFinishedAll_1(self):
        """C waits for both A and B to be finished; both start finished, so C runs right away."""
        comp = Composition()
        A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
        A.is_finished_flag = True
        B = TransferMechanism(function=Linear(intercept=4.0), name='B')
        B.is_finished_flag = True
        C = TransferMechanism(function=Linear(intercept=1.5), name='C')
        for m in [A, B, C]:
            comp.add_node(m)
        comp.add_projection(MappingProjection(), A, C)
        comp.add_projection(MappingProjection(), B, C)
        sched = Scheduler(**pytest.helpers.composition_to_scheduler_args(comp))
        sched.add_condition(A, EveryNPasses(1))
        sched.add_condition(B, EveryNPasses(1))
        sched.add_condition(C, WhenFinishedAll(A, B))
        termination_conds = {}
        termination_conds[TimeScale.ENVIRONMENT_SEQUENCE] = AfterNEnvironmentStateUpdates(1)
        termination_conds[TimeScale.ENVIRONMENT_STATE_UPDATE] = AfterNCalls(C, 1)
        output = list(sched.run(termination_conds=termination_conds))
        expected_output = [
            set([A, B]), C
        ]
        assert output == pytest.helpers.setify_expected_output(expected_output)

    def test_WhenFinishedAll_2(self):
        """A is never finished, so WhenFinishedAll(A, B) never lets C run."""
        comp = Composition()
        A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
        A.is_finished_flag = False
        B = TransferMechanism(function=Linear(intercept=4.0), name='B')
        B.is_finished_flag = True
        C = TransferMechanism(function=Linear(intercept=1.5), name='C')
        for m in [A, B, C]:
            comp.add_node(m)
        comp.add_projection(MappingProjection(), A, C)
        comp.add_projection(MappingProjection(), B, C)
        sched = Scheduler(**pytest.helpers.composition_to_scheduler_args(comp))
        sched.add_condition(A, EveryNPasses(1))
        sched.add_condition(B, EveryNPasses(1))
        sched.add_condition(C, WhenFinishedAll(A, B))
        termination_conds = {}
        termination_conds[TimeScale.ENVIRONMENT_SEQUENCE] = AfterNEnvironmentStateUpdates(1)
        termination_conds[TimeScale.ENVIRONMENT_STATE_UPDATE] = AfterNCalls(A, 5)
        output = list(sched.run(termination_conds=termination_conds))
        expected_output = [
            set([A, B]), set([A, B]), set([A, B]), set([A, B]), set([A, B]),
        ]
        assert output == pytest.helpers.setify_expected_output(expected_output)

    def test_WhenFinishedAll_noargs(self):
        """WhenFinishedAll() with no args: termination requires every node's flag set."""
        comp = Composition()
        A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A')
        B = TransferMechanism(function=Linear(intercept=4.0), name='B')
        C = TransferMechanism(function=Linear(intercept=1.5), name='C')
        for m in [A, B, C]:
            comp.add_node(m)
            m.is_finished_flag = False
        comp.add_projection(MappingProjection(), A, C)
        comp.add_projection(MappingProjection(), B, C)
        sched = Scheduler(**pytest.helpers.composition_to_scheduler_args(comp))
        sched.add_condition(A, Always())
        sched.add_condition(B, Always())
        sched.add_condition(C, Always())
        termination_conds = {}
        termination_conds[TimeScale.ENVIRONMENT_SEQUENCE] = AfterNEnvironmentStateUpdates(1)
        termination_conds[TimeScale.ENVIRONMENT_STATE_UPDATE] = WhenFinishedAll()
        output = []
        i = 0
        for step in sched.run(termination_conds=termination_conds):
            # flip flags partway through so the no-arg condition becomes true
            if i == 3:
                A.is_finished_flag = True
                B.is_finished_flag = True
            if i == 4:
                C.is_finished_flag = True
            output.append(step)
            i += 1
        expected_output = [
            set([A, B]), C, set([A, B]), C, set([A, B]),
        ]
        assert output == pytest.helpers.setify_expected_output(expected_output)
@pytest.mark.psyneulink
class TestAbsolute:
    """Tests of absolute-time conditions (``TimeInterval``, ``TimeTermination``)."""

    # Shared mechanisms reused by every parametrized case in this class.
    A = TransferMechanism(name='scheduler-pytests-A')
    B = TransferMechanism(name='scheduler-pytests-B')
    C = TransferMechanism(name='scheduler-pytests-C')

    @pytest.mark.parametrize(
        'conditions, termination_conds',
        [
            (
                {A: TimeInterval(repeat=8), B: TimeInterval(repeat=4), C: TimeInterval(repeat=2)},
                {TimeScale.ENVIRONMENT_STATE_UPDATE: AfterNCalls(A, 2)},
            ),
            (
                {A: TimeInterval(repeat=5), B: TimeInterval(repeat=3), C: TimeInterval(repeat=1)},
                {TimeScale.ENVIRONMENT_STATE_UPDATE: AfterNCalls(A, 2)},
            ),
            (
                {A: TimeInterval(repeat=3), B: TimeInterval(repeat=2)},
                {TimeScale.ENVIRONMENT_STATE_UPDATE: AfterNCalls(A, 2)},
            ),
            (
                {A: TimeInterval(repeat=5), B: TimeInterval(repeat=7)},
                {TimeScale.ENVIRONMENT_STATE_UPDATE: AfterNCalls(B, 2)},
            ),
            (
                {A: TimeInterval(repeat=1200), B: TimeInterval(repeat=1000)},
                {TimeScale.ENVIRONMENT_STATE_UPDATE: AfterNCalls(A, 3)},
            ),
            (
                {A: TimeInterval(repeat=0.33333), B: TimeInterval(repeat=0.66666)},
                {TimeScale.ENVIRONMENT_STATE_UPDATE: AfterNCalls(B, 3)},
            ),
            # smaller than default units cause floating point issue without mitigation
            (
                {A: TimeInterval(repeat=2 * _unit_registry.us), B: TimeInterval(repeat=4 * _unit_registry.us)},
                {TimeScale.ENVIRONMENT_STATE_UPDATE: AfterNCalls(B, 3)},
            ),
        ]
    )
    def test_TimeInterval_linear_everynms(self, conditions, termination_conds):
        """Consecutive executions of each node are separated by exactly its ``repeat``."""
        comp = Composition()
        comp.add_linear_processing_pathway([self.A, self.B, self.C])
        comp.scheduler.add_condition_set(conditions)
        list(comp.scheduler.run(termination_conds=termination_conds))
        for node, cond in conditions.items():
            # absolute timestamps of the consideration sets in which node ran
            executions = [
                comp.scheduler.execution_timestamps[comp.default_execution_id][i].absolute
                for i in range(len(comp.scheduler.execution_list[comp.default_execution_id]))
                if node in comp.scheduler.execution_list[comp.default_execution_id][i]
            ]
            for i in range(1, len(executions)):
                assert (executions[i] - executions[i - 1]) == cond.repeat

    @pytest.mark.parametrize(
        'conditions, termination_conds',
        [
            (
                {
                    A: TimeInterval(repeat=10, start=100),
                    B: TimeInterval(repeat=10, start=300),
                    C: TimeInterval(repeat=10, start=400)
                },
                {TimeScale.ENVIRONMENT_STATE_UPDATE: TimeInterval(start=500)}
            ),
            (
                {
                    A: TimeInterval(start=100),
                    B: TimeInterval(start=300),
                    C: TimeInterval(start=400)
                },
                {TimeScale.ENVIRONMENT_STATE_UPDATE: TimeInterval(start=500)}
            ),
            (
                {
                    A: TimeInterval(repeat=2, start=105),
                    B: TimeInterval(repeat=7, start=317),
                    C: TimeInterval(repeat=11, start=431)
                },
                {TimeScale.ENVIRONMENT_STATE_UPDATE: TimeInterval(start=597)}
            ),
            (
                {
                    A: TimeInterval(repeat=10, start=100, start_inclusive=False),
                    B: TimeInterval(repeat=10, start=300),
                    C: TimeInterval(repeat=10, start=400)
                },
                {TimeScale.ENVIRONMENT_STATE_UPDATE: TimeInterval(start=500)}
            ),
            (
                {
                    A: TimeInterval(repeat=10, start=100, start_inclusive=False),
                    B: TimeInterval(repeat=10, start=300),
                    C: TimeInterval(repeat=10, start=400)
                },
                {TimeScale.ENVIRONMENT_STATE_UPDATE: TimeInterval(start=500, start_inclusive=False)}
            ),
            (
                {
                    A: TimeInterval(repeat=10, start=100),
                    B: TimeInterval(repeat=10, start=100, end=200),
                    C: TimeInterval(repeat=10, start=400)
                },
                {TimeScale.ENVIRONMENT_STATE_UPDATE: TimeInterval(start=500, start_inclusive=False)}
            ),
            (
                {
                    A: TimeInterval(repeat=10, start=100),
                    B: TimeInterval(repeat=10, start=100, end=200, end_inclusive=False),
                    C: TimeInterval(repeat=10, start=400)
                },
                {TimeScale.ENVIRONMENT_STATE_UPDATE: TimeInterval(start=500)}
            ),
        ]
    )
    def test_TimeInterval_no_dependencies(self, conditions, termination_conds):
        """Check repeat spacing, start boundaries, and final timestamp for unconnected nodes."""
        comp = Composition()
        comp.add_nodes([self.A, self.B, self.C])
        comp.scheduler.add_condition_set(conditions)
        consideration_set_execution_abs_value = comp.scheduler._get_absolute_consideration_set_execution_unit(termination_conds)
        list(comp.scheduler.run(termination_conds=termination_conds))
        for node, cond in conditions.items():
            executions = [
                comp.scheduler.execution_timestamps[comp.default_execution_id][i].absolute
                for i in range(len(comp.scheduler.execution_list[comp.default_execution_id]))
                if node in comp.scheduler.execution_list[comp.default_execution_id][i]
            ]
            for i in range(1, len(executions)):
                interval = (executions[i] - executions[i - 1])
                # with no repeat, nodes run every consideration set execution
                if cond.repeat is not None:
                    assert interval == cond.repeat
                else:
                    assert interval == consideration_set_execution_abs_value
            if cond.start is not None:
                if cond.start_inclusive:
                    assert cond.start in executions
                else:
                    assert cond.start + consideration_set_execution_abs_value in executions
        # this test only runs a single ENVIRONMENT_STATE_UPDATE, so this
        # timestamp corresponds to its last
        final_timestamp = comp.scheduler.execution_timestamps[comp.default_execution_id][-1].absolute
        term_cond = termination_conds[TimeScale.ENVIRONMENT_STATE_UPDATE]
        if term_cond.start_inclusive:
            assert term_cond.start - consideration_set_execution_abs_value == final_timestamp
        else:
            assert term_cond.start == final_timestamp

    @pytest.mark.parametrize(
        'repeat, unit, expected_repeat',
        [
            (1, None, 1 * _unit_registry.ms),
            ('1ms', None, 1 * _unit_registry.ms),
            (1 * _unit_registry.ms, None, 1 * _unit_registry.ms),
            (1, 'ms', 1 * _unit_registry.ms),
            (1, _unit_registry.ms, 1 * _unit_registry.ms),
            ('1', _unit_registry.ms, 1 * _unit_registry.ms),
            (1 * _unit_registry.ms, _unit_registry.ns, 1 * _unit_registry.ms),
            (1000 * _unit_registry.ms, None, 1000 * _unit_registry.ms),
        ]
    )
    def test_TimeInterval_time_specs(self, repeat, unit, expected_repeat):
        """Numbers, strings, and pint quantities all normalize to the same repeat."""
        if unit is None:
            c = TimeInterval(repeat=repeat)
        else:
            c = TimeInterval(repeat=repeat, unit=unit)
        assert c.repeat == expected_repeat

    @pytest.mark.parametrize(
        'repeat, inclusive, last_time',
        [
            (10, True, 10 * _unit_registry.ms),
            (10, False, 11 * _unit_registry.ms),
        ]
    )
    def test_TimeTermination(
        self,
        three_node_linear_composition,
        repeat,
        inclusive,
        last_time
    ):
        """Scheduler clock stops exactly at (or one step past) the termination time."""
        _, comp = three_node_linear_composition
        comp.scheduler.termination_conds = {
            TimeScale.ENVIRONMENT_STATE_UPDATE: TimeTermination(repeat, inclusive)
        }
        list(comp.scheduler.run())
        assert comp.scheduler.get_clock(comp.scheduler.default_execution_id).time.absolute == last_time
| 43.869421
| 192
| 0.619758
| 5,576
| 53,082
| 5.689383
| 0.050753
| 0.09734
| 0.059104
| 0.085109
| 0.840373
| 0.82953
| 0.821019
| 0.802894
| 0.781427
| 0.77569
| 0
| 0.014845
| 0.274123
| 53,082
| 1,209
| 193
| 43.905707
| 0.808492
| 0.025621
| 0
| 0.660606
| 0
| 0
| 0.013424
| 0.00561
| 0
| 0
| 0
| 0
| 0.061616
| 1
| 0.055556
| false
| 0.077778
| 0.011111
| 0.00202
| 0.082828
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
772f48d1382319b017085e4020b9996d6332999e
| 1,661
|
py
|
Python
|
corehq/warehouse/migrations/0024_metafields.py
|
dborowiecki/commcare-hq
|
f2f4fa67faec09040a98502f5657444075b63f2e
|
[
"BSD-3-Clause"
] | null | null | null |
corehq/warehouse/migrations/0024_metafields.py
|
dborowiecki/commcare-hq
|
f2f4fa67faec09040a98502f5657444075b63f2e
|
[
"BSD-3-Clause"
] | null | null | null |
corehq/warehouse/migrations/0024_metafields.py
|
dborowiecki/commcare-hq
|
f2f4fa67faec09040a98502f5657444075b63f2e
|
[
"BSD-3-Clause"
] | null | null | null |
# Generated by Django 1.11.10 on 2018-03-06 12:58
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add form metadata columns (app version, CommCare version, start/end
    times) to both the fact and staging form tables."""

    dependencies = [
        ('warehouse', '0023_add_build_info'),
    ]

    # The same four nullable columns are added to each table; the
    # comprehension preserves the original operation order (all formfact
    # fields first, then formstagingtable).
    operations = [
        migrations.AddField(model_name=model_name, name=field_name, field=field)
        for model_name in ('formfact', 'formstagingtable')
        for field_name, field in (
            ('app_version', models.PositiveIntegerField(blank=True, null=True)),
            ('commcare_version', models.CharField(blank=True, max_length=8, null=True)),
            ('time_end', models.DateTimeField(blank=True, null=True)),
            ('time_start', models.DateTimeField(blank=True, null=True)),
        )
    ]
| 30.759259
| 72
| 0.572547
| 155
| 1,661
| 6
| 0.303226
| 0.154839
| 0.197849
| 0.232258
| 0.83871
| 0.83871
| 0.801075
| 0.801075
| 0.801075
| 0.775269
| 0
| 0.020158
| 0.313064
| 1,661
| 53
| 73
| 31.339623
| 0.794917
| 0.028296
| 0
| 0.851064
| 1
| 0
| 0.132754
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.021277
| 0
| 0.085106
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
77304d97aeac92fa7921306c9bba992f912c6e62
| 15,624
|
py
|
Python
|
ec2_compare/internal/instance_type/g4ad.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | null | null | null |
ec2_compare/internal/instance_type/g4ad.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | null | null | null |
ec2_compare/internal/instance_type/g4ad.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | null | null | null |
# Automatically generated
# pylint: disable=all
# Static fixture: captured output of the AWS EC2 DescribeInstanceTypes API for
# the g4ad instance family (g4ad.xlarge .. g4ad.16xlarge), with nested info
# blocks additionally flattened onto each record. Consumed by
# get_instances_list() below; do not edit by hand — regenerate from the API.
get = [{'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.0, 'DefaultVCpus': 4, 'DefaultCores': 2, 'DefaultThreadsPerCore': 2, 'ValidCores': [2], 'ValidThreadsPerCore': [1, 2], 'SizeInMiB': 16384, 'TotalSizeInGB': 150, 'Disks': [{'SizeInGB': 150, 'Count': 1, 'Type': 'ssd'}], 'NvmeSupport': 'required', 'EncryptionSupport': 'supported', 'EbsOptimizedSupport': 'default', 'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 400, 'BaselineThroughputInMBps': 50.0, 'BaselineIops': 1700, 'MaximumBandwidthInMbps': 3170, 'MaximumThroughputInMBps': 396.25, 'MaximumIops': 13333}, 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 2, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 2}], 'Ipv4AddressesPerInterface': 4, 'Ipv6AddressesPerInterface': 4, 'Ipv6Supported': True, 'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': True, 'Gpus': [{'Name': 'Radeon Pro V520', 'Manufacturer': 'AMD', 'Count': 1, 'MemoryInfo': {'SizeInMiB': 8192}}], 'TotalGpuMemoryInMiB': 8192, 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'g4ad.xlarge', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'SupportedVirtualizationTypes': ['hvm'], 'BareMetal': False, 'Hypervisor': 'nitro', 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.0}, 'VCpuInfo': {'DefaultVCpus': 4, 'DefaultCores': 2, 'DefaultThreadsPerCore': 2, 'ValidCores': [2], 'ValidThreadsPerCore': [1, 2]}, 'MemoryInfo': {'SizeInMiB': 16384}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 150, 'Disks': [{'SizeInGB': 150, 'Count': 1, 'Type': 'ssd'}], 'NvmeSupport': 'required', 'EncryptionSupport': 'required'}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 
'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 400, 'BaselineThroughputInMBps': 50.0, 'BaselineIops': 1700, 'MaximumBandwidthInMbps': 3170, 'MaximumThroughputInMBps': 396.25, 'MaximumIops': 13333}, 'NvmeSupport': 'required'}, 'NetworkInfo': {'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 2, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 2}], 'Ipv4AddressesPerInterface': 4, 'Ipv6AddressesPerInterface': 4, 'Ipv6Supported': True, 'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': True}, 'GpuInfo': {'Gpus': [{'Name': 'Radeon Pro V520', 'Manufacturer': 'AMD', 'Count': 1, 'MemoryInfo': {'SizeInMiB': 8192}}], 'TotalGpuMemoryInMiB': 8192}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': False, 'AutoRecoverySupported': False, 'SupportedBootModes': ['legacy-bios', 'uefi']}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.0, 'DefaultVCpus': 8, 'DefaultCores': 4, 'DefaultThreadsPerCore': 2, 'ValidCores': [2, 4], 'ValidThreadsPerCore': [1, 2], 'SizeInMiB': 32768, 'TotalSizeInGB': 300, 'Disks': [{'SizeInGB': 300, 'Count': 1, 'Type': 'ssd'}], 'NvmeSupport': 'required', 'EncryptionSupport': 'supported', 'EbsOptimizedSupport': 'default', 'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 800, 'BaselineThroughputInMBps': 100.0, 'BaselineIops': 3400, 'MaximumBandwidthInMbps': 3170, 'MaximumThroughputInMBps': 396.25, 'MaximumIops': 13333}, 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 2, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 2}], 'Ipv4AddressesPerInterface': 4, 'Ipv6AddressesPerInterface': 4, 'Ipv6Supported': True, 
'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': True, 'Gpus': [{'Name': 'Radeon Pro V520', 'Manufacturer': 'AMD', 'Count': 1, 'MemoryInfo': {'SizeInMiB': 8192}}], 'TotalGpuMemoryInMiB': 8192, 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'g4ad.2xlarge', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'SupportedVirtualizationTypes': ['hvm'], 'BareMetal': False, 'Hypervisor': 'nitro', 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.0}, 'VCpuInfo': {'DefaultVCpus': 8, 'DefaultCores': 4, 'DefaultThreadsPerCore': 2, 'ValidCores': [2, 4], 'ValidThreadsPerCore': [1, 2]}, 'MemoryInfo': {'SizeInMiB': 32768}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 300, 'Disks': [{'SizeInGB': 300, 'Count': 1, 'Type': 'ssd'}], 'NvmeSupport': 'required', 'EncryptionSupport': 'required'}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 800, 'BaselineThroughputInMBps': 100.0, 'BaselineIops': 3400, 'MaximumBandwidthInMbps': 3170, 'MaximumThroughputInMBps': 396.25, 'MaximumIops': 13333}, 'NvmeSupport': 'required'}, 'NetworkInfo': {'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 2, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 2}], 'Ipv4AddressesPerInterface': 4, 'Ipv6AddressesPerInterface': 4, 'Ipv6Supported': True, 'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': True}, 'GpuInfo': {'Gpus': [{'Name': 'Radeon Pro V520', 'Manufacturer': 'AMD', 'Count': 1, 'MemoryInfo': {'SizeInMiB': 8192}}], 'TotalGpuMemoryInMiB': 8192}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': 
False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': False, 'AutoRecoverySupported': False, 'SupportedBootModes': ['legacy-bios', 'uefi']}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.0, 'DefaultVCpus': 16, 'DefaultCores': 8, 'DefaultThreadsPerCore': 2, 'ValidCores': [2, 4, 8], 'ValidThreadsPerCore': [1, 2], 'SizeInMiB': 65536, 'TotalSizeInGB': 600, 'Disks': [{'SizeInGB': 600, 'Count': 1, 'Type': 'ssd'}], 'NvmeSupport': 'required', 'EncryptionSupport': 'supported', 'EbsOptimizedSupport': 'default', 'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 1580, 'BaselineThroughputInMBps': 197.5, 'BaselineIops': 6700, 'MaximumBandwidthInMbps': 3170, 'MaximumThroughputInMBps': 396.25, 'MaximumIops': 13333}, 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 3, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 3}], 'Ipv4AddressesPerInterface': 10, 'Ipv6AddressesPerInterface': 10, 'Ipv6Supported': True, 'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': True, 'Gpus': [{'Name': 'Radeon Pro V520', 'Manufacturer': 'AMD', 'Count': 1, 'MemoryInfo': {'SizeInMiB': 8192}}], 'TotalGpuMemoryInMiB': 8192, 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'g4ad.4xlarge', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'SupportedVirtualizationTypes': ['hvm'], 'BareMetal': False, 'Hypervisor': 'nitro', 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.0}, 'VCpuInfo': {'DefaultVCpus': 16, 'DefaultCores': 8, 'DefaultThreadsPerCore': 2, 'ValidCores': [2, 4, 8], 'ValidThreadsPerCore': [1, 2]}, 'MemoryInfo': {'SizeInMiB': 65536}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 600, 'Disks': [{'SizeInGB': 600, 
'Count': 1, 'Type': 'ssd'}], 'NvmeSupport': 'required', 'EncryptionSupport': 'required'}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 1580, 'BaselineThroughputInMBps': 197.5, 'BaselineIops': 6700, 'MaximumBandwidthInMbps': 3170, 'MaximumThroughputInMBps': 396.25, 'MaximumIops': 13333}, 'NvmeSupport': 'required'}, 'NetworkInfo': {'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 3, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 3}], 'Ipv4AddressesPerInterface': 10, 'Ipv6AddressesPerInterface': 10, 'Ipv6Supported': True, 'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': True}, 'GpuInfo': {'Gpus': [{'Name': 'Radeon Pro V520', 'Manufacturer': 'AMD', 'Count': 1, 'MemoryInfo': {'SizeInMiB': 8192}}], 'TotalGpuMemoryInMiB': 8192}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': False, 'SupportedBootModes': ['legacy-bios', 'uefi']}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.0, 'DefaultVCpus': 32, 'DefaultCores': 16, 'DefaultThreadsPerCore': 2, 'ValidCores': [2, 4, 8, 16], 'ValidThreadsPerCore': [1, 2], 'SizeInMiB': 131072, 'TotalSizeInGB': 1200, 'Disks': [{'SizeInGB': 1200, 'Count': 1, 'Type': 'ssd'}], 'NvmeSupport': 'required', 'EncryptionSupport': 'supported', 'EbsOptimizedSupport': 'default', 'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 3170, 'BaselineThroughputInMBps': 396.25, 'BaselineIops': 13333, 'MaximumBandwidthInMbps': 3170, 'MaximumThroughputInMBps': 396.25, 'MaximumIops': 13333}, 'NetworkPerformance': '15 Gigabit', 'MaximumNetworkInterfaces': 4, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': 
[{'NetworkCardIndex': 0, 'NetworkPerformance': '15 Gigabit', 'MaximumNetworkInterfaces': 4}], 'Ipv4AddressesPerInterface': 15, 'Ipv6AddressesPerInterface': 15, 'Ipv6Supported': True, 'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': True, 'Gpus': [{'Name': 'Radeon Pro V520', 'Manufacturer': 'AMD', 'Count': 2, 'MemoryInfo': {'SizeInMiB': 8192}}], 'TotalGpuMemoryInMiB': 16384, 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'g4ad.8xlarge', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'SupportedVirtualizationTypes': ['hvm'], 'BareMetal': False, 'Hypervisor': 'nitro', 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.0}, 'VCpuInfo': {'DefaultVCpus': 32, 'DefaultCores': 16, 'DefaultThreadsPerCore': 2, 'ValidCores': [2, 4, 8, 16], 'ValidThreadsPerCore': [1, 2]}, 'MemoryInfo': {'SizeInMiB': 131072}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 1200, 'Disks': [{'SizeInGB': 1200, 'Count': 1, 'Type': 'ssd'}], 'NvmeSupport': 'required', 'EncryptionSupport': 'required'}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 3170, 'BaselineThroughputInMBps': 396.25, 'BaselineIops': 13333, 'MaximumBandwidthInMbps': 3170, 'MaximumThroughputInMBps': 396.25, 'MaximumIops': 13333}, 'NvmeSupport': 'required'}, 'NetworkInfo': {'NetworkPerformance': '15 Gigabit', 'MaximumNetworkInterfaces': 4, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': '15 Gigabit', 'MaximumNetworkInterfaces': 4}], 'Ipv4AddressesPerInterface': 15, 'Ipv6AddressesPerInterface': 15, 'Ipv6Supported': True, 'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': True}, 'GpuInfo': {'Gpus': [{'Name': 'Radeon Pro V520', 'Manufacturer': 
'AMD', 'Count': 2, 'MemoryInfo': {'SizeInMiB': 8192}}], 'TotalGpuMemoryInMiB': 16384}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': False, 'SupportedBootModes': ['legacy-bios', 'uefi']}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.0, 'DefaultVCpus': 64, 'DefaultCores': 32, 'DefaultThreadsPerCore': 2, 'ValidCores': [2, 4, 8, 16, 32], 'ValidThreadsPerCore': [1, 2], 'SizeInMiB': 262144, 'TotalSizeInGB': 2400, 'Disks': [{'SizeInGB': 1200, 'Count': 2, 'Type': 'ssd'}], 'NvmeSupport': 'required', 'EncryptionSupport': 'supported', 'EbsOptimizedSupport': 'default', 'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 6300, 'BaselineThroughputInMBps': 787.5, 'BaselineIops': 26667, 'MaximumBandwidthInMbps': 6300, 'MaximumThroughputInMBps': 787.5, 'MaximumIops': 26667}, 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 8, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 8}], 'Ipv4AddressesPerInterface': 30, 'Ipv6AddressesPerInterface': 30, 'Ipv6Supported': True, 'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': True, 'Gpus': [{'Name': 'Radeon Pro V520', 'Manufacturer': 'AMD', 'Count': 4, 'MemoryInfo': {'SizeInMiB': 8192}}], 'TotalGpuMemoryInMiB': 32768, 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'g4ad.16xlarge', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'SupportedVirtualizationTypes': ['hvm'], 'BareMetal': False, 'Hypervisor': 'nitro', 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.0}, 'VCpuInfo': {'DefaultVCpus': 64, 'DefaultCores': 32, 'DefaultThreadsPerCore': 2, 
'ValidCores': [2, 4, 8, 16, 32], 'ValidThreadsPerCore': [1, 2]}, 'MemoryInfo': {'SizeInMiB': 262144}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 2400, 'Disks': [{'SizeInGB': 1200, 'Count': 2, 'Type': 'ssd'}], 'NvmeSupport': 'required', 'EncryptionSupport': 'required'}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 6300, 'BaselineThroughputInMBps': 787.5, 'BaselineIops': 26667, 'MaximumBandwidthInMbps': 6300, 'MaximumThroughputInMBps': 787.5, 'MaximumIops': 26667}, 'NvmeSupport': 'required'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 8, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 8}], 'Ipv4AddressesPerInterface': 30, 'Ipv6AddressesPerInterface': 30, 'Ipv6Supported': True, 'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': True}, 'GpuInfo': {'Gpus': [{'Name': 'Radeon Pro V520', 'Manufacturer': 'AMD', 'Count': 4, 'MemoryInfo': {'SizeInMiB': 8192}}], 'TotalGpuMemoryInMiB': 32768}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': False, 'AutoRecoverySupported': False, 'SupportedBootModes': ['legacy-bios', 'uefi']}] # noqa: E501
def get_instances_list() -> list:
    """Return the captured EC2 instance-type records for the g4ad family.

    The returned value is the module-level ``get`` fixture itself (not a
    copy), as captured from the AWS DescribeInstanceTypes API.
    """
    # pylint: disable=all
    return get
| 1,302
| 15,432
| 0.726767
| 1,254
| 15,624
| 9.045455
| 0.11882
| 0.054659
| 0.023274
| 0.02539
| 0.952482
| 0.944107
| 0.934056
| 0.929384
| 0.929384
| 0.929384
| 0
| 0.058109
| 0.078085
| 15,624
| 11
| 15,433
| 1,420.363636
| 0.729381
| 0.008257
| 0
| 0
| 1
| 0
| 0.664643
| 0.253794
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 12
|
7742ef7a52b727a1e4fd17f9d5cae868c8829bcb
| 8,983
|
py
|
Python
|
checks/check_magic_eight.py
|
larrys/appinspect-best-practices
|
0b4ea9f5879516b6fa2fc5b5c361811709b40347
|
[
"Apache-2.0"
] | null | null | null |
checks/check_magic_eight.py
|
larrys/appinspect-best-practices
|
0b4ea9f5879516b6fa2fc5b5c361811709b40347
|
[
"Apache-2.0"
] | null | null | null |
checks/check_magic_eight.py
|
larrys/appinspect-best-practices
|
0b4ea9f5879516b6fa2fc5b5c361811709b40347
|
[
"Apache-2.0"
] | null | null | null |
import splunk_appinspect
import os
import regex as re
from splunk_appinspect.configuration_file import ConfigurationFile
from splunk_appinspect.splunk import normalizeBoolean
from .shared import _is_numeric, ignorable
@splunk_appinspect.tags("best_practices", "best_practices_magic_eight")
@splunk_appinspect.cert_version(min="2.14.1")
def check_should_linemerge(app, reporter):
    """Check that SHOULD_LINEMERGE is set to false in every props.conf stanza."""
    prop = "SHOULD_LINEMERGE"
    ignore_keys = ("should_linemerge", "magic8")
    for directory, filename in app.get_config_file_paths("props.conf").items():
        file_path = os.path.join(directory, filename)
        props_config: ConfigurationFile = app.props_conf(directory)
        for stanza in props_config.sections():
            if not stanza.has_option(prop):
                # Setting absent: warn unless the stanza is marked ignorable.
                if not ignorable(stanza, ignore_keys, config=props_config):
                    reporter.warn(f"{prop} is not set for [{stanza.name}]", file_path, stanza.lineno)
            else:
                setting = stanza.get_option(prop)
                if normalizeBoolean(setting.value):
                    # Present but truthy: best practice is explicit false.
                    if not ignorable(setting, ignore_keys, stanza=stanza, config=props_config):
                        reporter.warn(
                            f"{prop} is true, when it should be false for [{stanza.name}]",
                            file_path,
                            stanza.lineno,
                        )
@splunk_appinspect.tags("best_practices", "best_practices_magic_eight")
@splunk_appinspect.cert_version(min="2.14.1")
def check_line_breaker(app, reporter):
    """Check that LINE_BREAKER is set in every props.conf stanza."""
    prop = "LINE_BREAKER"
    ignore_keys = ("line_breaker", "magic8")
    for directory, filename in app.get_config_file_paths("props.conf").items():
        file_path = os.path.join(directory, filename)
        props_config: ConfigurationFile = app.props_conf(directory)
        for stanza in props_config.sections():
            if stanza.has_option(prop):
                continue
            # Setting absent: warn unless the stanza is marked ignorable.
            if not ignorable(stanza, ignore_keys, config=props_config):
                reporter.warn(f"{prop} is not set for [{stanza.name}]", file_path, stanza.lineno)
@splunk_appinspect.tags("best_practices", "best_practices_magic_eight")
@splunk_appinspect.cert_version(min="2.14.1")
def check_time_prefix(app, reporter):
    """Check that TIME_PREFIX is set in every props.conf stanza."""
    prop = "TIME_PREFIX"
    ignore_keys = ("time_prefix", "magic8")
    for directory, filename in app.get_config_file_paths("props.conf").items():
        file_path = os.path.join(directory, filename)
        props_config: ConfigurationFile = app.props_conf(directory)
        for stanza in props_config.sections():
            if stanza.has_option(prop):
                continue
            # Setting absent: warn unless the stanza is marked ignorable.
            if not ignorable(stanza, ignore_keys, config=props_config):
                reporter.warn(f"{prop} is not set for [{stanza.name}]", file_path, stanza.lineno)
@splunk_appinspect.tags("best_practices", "best_practices_magic_eight")
@splunk_appinspect.cert_version(min="2.14.1")
def check_max_timestamp_lookahead(app, reporter):
    """Check that MAX_TIMESTAMP_LOOKAHEAD is set, numeric and >= 0."""
    prop = "MAX_TIMESTAMP_LOOKAHEAD"
    ignore_keys = ("max_timestamp_lookahead", "magic8")
    for directory, filename in app.get_config_file_paths("props.conf").items():
        file_path = os.path.join(directory, filename)
        props_config: ConfigurationFile = app.props_conf(directory)
        for stanza in props_config.sections():
            if not stanza.has_option(prop):
                # Setting absent entirely.
                if not ignorable(stanza, ignore_keys, config=props_config):
                    reporter.warn(f"{prop} is not set for [{stanza.name}]", file_path, stanza.lineno)
                continue
            setting = stanza.get_option(prop)
            if not _is_numeric(setting.value):
                if not ignorable(setting, ignore_keys, stanza=stanza, config=props_config):
                    reporter.warn(
                        f"{prop} is not numeric for [{stanza.name}] ({setting.value})",
                        file_path,
                        setting.lineno,
                    )
            elif int(setting.value) < 0:
                if not ignorable(setting, ignore_keys, stanza=stanza, config=props_config):
                    reporter.warn(
                        f"{prop} is not >= 0 [{stanza.name}] ({setting.value})",
                        file_path,
                        setting.lineno,
                    )
@splunk_appinspect.tags("best_practices", "best_practices_magic_eight")
@splunk_appinspect.cert_version(min="2.14.1")
def check_time_format(app, reporter):
    """Check that TIME_FORMAT is set in every props.conf stanza."""
    prop = "TIME_FORMAT"
    ignore_keys = ("time_format", "magic8")
    for directory, filename in app.get_config_file_paths("props.conf").items():
        file_path = os.path.join(directory, filename)
        props_config: ConfigurationFile = app.props_conf(directory)
        for stanza in props_config.sections():
            if stanza.has_option(prop):
                continue
            # Setting absent: warn unless the stanza is marked ignorable.
            if not ignorable(stanza, ignore_keys, config=props_config):
                reporter.warn(f"{prop} is not set for [{stanza.name}]", file_path, stanza.lineno)
@splunk_appinspect.tags("best_practices", "best_practices_magic_eight")
@splunk_appinspect.cert_version(min="2.14.1")
def check_truncate(app, reporter):
    """Check that TRUNCATE is set, numeric and > 0."""
    prop = "TRUNCATE"
    ignore_keys = ("truncate", "magic8")
    for directory, filename in app.get_config_file_paths("props.conf").items():
        file_path = os.path.join(directory, filename)
        props_config: ConfigurationFile = app.props_conf(directory)
        for stanza in props_config.sections():
            if not stanza.has_option(prop):
                # Setting absent entirely.
                if not ignorable(stanza, ignore_keys, config=props_config):
                    reporter.warn(f"{prop} is not set for [{stanza.name}]", file_path, stanza.lineno)
                continue
            setting = stanza.get_option(prop)
            if not _is_numeric(setting.value):
                if not ignorable(setting, ignore_keys, stanza=stanza, config=props_config):
                    reporter.warn(
                        f"{prop} is not numeric for [{stanza.name}] ({setting.value})",
                        file_path,
                        setting.lineno,
                    )
            elif int(setting.value) <= 0:
                if not ignorable(setting, ignore_keys, stanza=stanza, config=props_config):
                    reporter.warn(
                        f"{prop} is not > 0 [{stanza.name}] ({setting.value})",
                        file_path,
                        setting.lineno,
                    )
@splunk_appinspect.tags("best_practices", "best_practices_magic_eight")
@splunk_appinspect.cert_version(min="2.14.1")
def check_event_breaker_enable(app, reporter):
    """Check that EVENT_BREAKER_ENABLE is set to true in every props.conf stanza.

    Warns when the setting is absent or evaluates to false, unless the stanza
    (or the individual setting) is marked ignorable for this check.
    """
    property = "EVENT_BREAKER_ENABLE"
    # FIX: this check previously used ("truncate", "magic8") as its ignore
    # tokens, copy-pasted from check_truncate. Every other check in this file
    # uses its own snake_cased property name (cf. check_event_breaker's
    # "event_breaker"), so suppressions targeting this check never matched and
    # "truncate" suppressions wrongly silenced EVENT_BREAKER_ENABLE warnings.
    ignore_keys = ("event_breaker_enable", "magic8")
    config_file_paths = app.get_config_file_paths("props.conf")
    for directory, filename in config_file_paths.items():
        file_path = os.path.join(directory, filename)
        props_config: ConfigurationFile = app.props_conf(directory)
        for stanza in props_config.sections():
            if not stanza.has_option(property):
                # Setting absent entirely.
                if not ignorable(stanza, ignore_keys, config=props_config):
                    output = f"{property} is not set for [{stanza.name}]"
                    reporter.warn(output, file_path, stanza.lineno)
            elif not normalizeBoolean(stanza.get_option(property).value):
                # Present but false: forwarder-side event breaking stays off.
                if not ignorable(stanza.get_option(property), ignore_keys, stanza=stanza, config=props_config):
                    output = f"{property} is false, when it should be true for [{stanza.name}]"
                    reporter.warn(output, file_path, stanza.lineno)
@splunk_appinspect.tags("best_practices", "best_practices_magic_eight")
@splunk_appinspect.cert_version(min="2.14.1")
def check_event_breaker(app, reporter):
    """Check that EVENT_BREAKER is set in every props.conf stanza."""
    prop = "EVENT_BREAKER"
    ignore_keys = ("event_breaker", "magic8")
    for directory, filename in app.get_config_file_paths("props.conf").items():
        file_path = os.path.join(directory, filename)
        props_config: ConfigurationFile = app.props_conf(directory)
        for stanza in props_config.sections():
            if stanza.has_option(prop):
                continue
            # Setting absent: warn unless the stanza is marked ignorable.
            if not ignorable(stanza, ignore_keys, config=props_config):
                reporter.warn(f"{prop} is not set for [{stanza.name}]", file_path, stanza.lineno)
| 55.110429
| 130
| 0.662808
| 1,099
| 8,983
| 5.196542
| 0.077343
| 0.057783
| 0.063036
| 0.056382
| 0.872877
| 0.859744
| 0.848538
| 0.848538
| 0.848538
| 0.848538
| 0
| 0.007322
| 0.224647
| 8,983
| 162
| 131
| 55.450617
| 0.812635
| 0.033396
| 0
| 0.724638
| 0
| 0
| 0.176464
| 0.034714
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057971
| false
| 0
| 0.043478
| 0
| 0.101449
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7748ed8574def444c18d94429a5e192b0f209065
| 9,708
|
gyp
|
Python
|
glue.gyp
|
federicamontes/scal
|
16c7f000c975f88617498cd797ed79398fe5ee61
|
[
"Unlicense"
] | null | null | null |
glue.gyp
|
federicamontes/scal
|
16c7f000c975f88617498cd797ed79398fe5ee61
|
[
"Unlicense"
] | null | null | null |
glue.gyp
|
federicamontes/scal
|
16c7f000c975f88617498cd797ed79398fe5ee61
|
[
"Unlicense"
] | null | null | null |
# GYP build description for the scal benchmark suite.
# Each target builds one static library that adapts a concurrent data
# structure (queue / stack / deque / k-FIFO variant) to the benchmark's
# standard interface via a "glue" source under src/benchmark/std_glue/.
# Targets prefixed "ll-" build locally-linearizable variants; "hc-" are
# hard-coded timestamped variants; "dds-" are distributed data structures.
{
  'includes': [
    'common.gypi',
  ],
  'targets': [
    {
      'target_name': 'ms',
      'type': 'static_library',
      'sources': [
        'src/benchmark/std_glue/glue_ms_queue.cc'
      ],
    },
    {
      'target_name': 'treiber',
      'type': 'static_library',
      'sources': [
        'src/benchmark/std_glue/glue_treiber_stack.cc'
      ],
    },
    {
      'target_name': 'kstack',
      'type': 'static_library',
      'sources': [
        'src/benchmark/std_glue/glue_kstack.cc'
      ],
    },
    {
      'target_name': 'll-kstack',
      'type': 'static_library',
      'cflags': [ '-DLOCALLY_LINEARIZABLE' ],
      'sources': [
        'src/benchmark/std_glue/glue_ll_kstack.cc'
      ],
    },
    {
      'target_name': 'dds-1random-ms',
      'type': 'static_library',
      'sources': [
        'src/benchmark/std_glue/glue_dds_1random_ms.cc'
      ],
    },
    {
      'target_name': 'dds-1random-treiber',
      'type': 'static_library',
      'sources': [
        'src/benchmark/std_glue/glue_dds_1random_treiber.cc'
      ],
    },
    {
      'target_name': 'dds-partrr-ms',
      'type': 'static_library',
      'sources': [
        'src/benchmark/std_glue/glue_dds_partrr_ms.cc'
      ],
    },
    {
      'target_name': 'dds-partrr-treiber',
      'type': 'static_library',
      'sources': [
        'src/benchmark/std_glue/glue_dds_partrr_treiber.cc'
      ],
    },
    {
      'target_name': 'fc',
      'type': 'static_library',
      'sources': [
        'src/benchmark/std_glue/glue_fc_queue.cc'
      ],
    },
    {
      'target_name': 'rd',
      'type': 'static_library',
      'sources': [
        'src/benchmark/std_glue/glue_rd_queue.cc'
      ],
    },
    {
      'target_name': 'sq',
      'type': 'static_library',
      'sources': [
        'src/benchmark/std_glue/glue_sq_queue.cc'
      ],
    },
    {
      'target_name': 'us-kfifo',
      'type': 'static_library',
      'sources': [
        'src/benchmark/std_glue/glue_uskfifo.cc'
      ],
    },
    {
      # Same glue source as 'us-kfifo', rebuilt with the
      # LOCALLY_LINEARIZABLE flag set.
      'target_name': 'll-us-kfifo',
      'type': 'static_library',
      'cflags': [ '-DLOCALLY_LINEARIZABLE' ],
      'sources': [
        'src/benchmark/std_glue/glue_uskfifo.cc'
      ],
    },
    {
      'target_name': 'bs-kfifo',
      'type': 'static_library',
      'sources': [
        'src/benchmark/std_glue/glue_bskfifo.cc'
      ],
    },
    {
      'target_name': 'll-dds-ms',
      'type': 'static_library',
      'defines': [ 'GET_TRY_LOCAL_FIRST' ],
      'sources': [
        'src/benchmark/std_glue/glue_ll_dds_ms.cc'
      ],
    },
    {
      'target_name': 'll-dds-treiber',
      'type': 'static_library',
      'defines': [ 'GET_TRY_LOCAL_FIRST' ],
      'sources': [
        'src/benchmark/std_glue/glue_ll_dds_treiber.cc'
      ],
    },
    {
      # The *-nonlinempty targets share one glue source and select the
      # backend/balancer purely via preprocessor defines.
      'target_name': 'll-dds-ms-nonlinempty',
      'type': 'static_library',
      'defines': [
        'BACKEND_MS_QUEUE',
        'BALANCER_LL',
        'NON_LINEARIZABLE_EMPTY'
      ],
      'sources': [
        'src/benchmark/std_glue/glue_dds.cc'
      ],
    },
    {
      'target_name': 'll-dds-treiber-nonlinempty',
      'type': 'static_library',
      'defines': [
        'BACKEND_TREIBER',
        'BALANCER_LL',
        'NON_LINEARIZABLE_EMPTY'
      ],
      'sources': [
        'src/benchmark/std_glue/glue_dds.cc'
      ],
    },
    {
      'target_name': 'll-dyn-dds-ms-nonlinempty',
      'type': 'static_library',
      'defines': [
        'BACKEND_MS_QUEUE',
        'NON_LINEARIZABLE_EMPTY'
      ],
      'sources': [
        'src/benchmark/std_glue/glue_dyn_dds.cc'
      ],
    },
    {
      'target_name': 'll-dyn-dds-treiber-nonlinempty',
      'type': 'static_library',
      'defines': [
        'BACKEND_TREIBER',
        'NON_LINEARIZABLE_EMPTY'
      ],
      'sources': [
        'src/benchmark/std_glue/glue_dyn_dds.cc'
      ],
    },
    {
      'target_name': 'll-dyn-dds-ms',
      'type': 'static_library',
      'cflags': [ ],
      'sources': [
        'src/benchmark/std_glue/glue_ll_dyn_dds_ms.cc'
      ],
    },
    {
      'target_name': 'll-dyn-dds-treiber',
      'type': 'static_library',
      'cflags': [ ],
      'sources': [
        'src/benchmark/std_glue/glue_ll_dyn_dds_treiber.cc'
      ],
    },
    {
      # Only target with an external dependency: the upstream LCRQ code.
      'target_name': 'lcrq',
      'type': 'static_library',
      'cflags': [ ],
      'sources': [
        'src/benchmark/std_glue/glue_lcrq.cc'
      ],
      'dependencies': [
        'upstream.gyp:lcrq-base',
      ],
    },
    {
      'target_name': 'lb-stack',
      'type': 'static_library',
      'sources': [
        'src/benchmark/std_glue/glue_lb_stack.cc'
      ],
    },
    {
      'target_name': 'lb-queue',
      'type': 'static_library',
      'sources': [
        'src/benchmark/std_glue/glue_lb_queue.cc'
      ],
    },
    {
      'target_name': 'hc-ts-cas-stack',
      'type': 'static_library',
      'cflags': [ ],
      'sources': [
        'src/benchmark/std_glue/glue_hardcoded_ts_cas_stack.cc'
      ],
    },
    {
      'target_name': 'hc-ts-stutter-stack',
      'type': 'static_library',
      'cflags': [ ],
      'sources': [
        'src/benchmark/std_glue/glue_hardcoded_ts_stutter_stack.cc'
      ],
    },
    {
      'target_name': 'hc-ts-interval-stack',
      'type': 'static_library',
      'cflags': [ ],
      'sources': [
        'src/benchmark/std_glue/glue_hardcoded_ts_interval_stack.cc'
      ],
    },
    {
      'target_name': 'hc-ts-atomic-stack',
      'type': 'static_library',
      'cflags': [ ],
      'sources': [
        'src/benchmark/std_glue/glue_hardcoded_ts_atomic_stack.cc'
      ],
    },
    {
      'target_name': 'hc-ts-hardware-stack',
      'type': 'static_library',
      'cflags': [ ],
      'sources': [
        'src/benchmark/std_glue/glue_hardcoded_ts_hardware_stack.cc'
      ],
    },
    {
      'target_name': 'hc-ts-cas-queue',
      'type': 'static_library',
      'cflags': [ ],
      'sources': [
        'src/benchmark/std_glue/glue_hardcoded_ts_cas_queue.cc'
      ],
    },
    {
      'target_name': 'hc-ts-stutter-queue',
      'type': 'static_library',
      'cflags': [ ],
      'sources': [
        'src/benchmark/std_glue/glue_hardcoded_ts_stutter_queue.cc'
      ],
    },
    {
      'target_name': 'hc-ts-interval-queue',
      'type': 'static_library',
      'cflags': [ ],
      'sources': [
        'src/benchmark/std_glue/glue_hardcoded_ts_interval_queue.cc'
      ],
    },
    {
      'target_name': 'hc-ts-atomic-queue',
      'type': 'static_library',
      'cflags': [ ],
      'sources': [
        'src/benchmark/std_glue/glue_hardcoded_ts_atomic_queue.cc'
      ],
    },
    {
      'target_name': 'hc-ts-hardware-queue',
      'type': 'static_library',
      'cflags': [ ],
      'sources': [
        'src/benchmark/std_glue/glue_hardcoded_ts_hardware_queue.cc'
      ],
    },
    {
      'target_name': 'rts-queue',
      'type': 'static_library',
      'sources': [
        'src/benchmark/std_glue/glue_rts_queue.cc'
      ],
    },
    {
      'target_name': 'cts-queue',
      'type': 'static_library',
      'sources': [
        'src/benchmark/std_glue/glue_cts_queue.cc'
      ],
    },
    {
      'target_name': 'ts-cas-deque',
      'type': 'static_library',
      'cflags': [ ],
      'sources': [
        'src/benchmark/std_glue/glue_ts_cas_deque.cc'
      ],
    },
    {
      'target_name': 'ts-stutter-deque',
      'type': 'static_library',
      'cflags': [ ],
      'sources': [
        'src/benchmark/std_glue/glue_ts_stutter_deque.cc'
      ],
    },
    {
      'target_name': 'ts-interval-deque',
      'type': 'static_library',
      'cflags': [ ],
      'sources': [
        'src/benchmark/std_glue/glue_ts_interval_deque.cc'
      ],
    },
    {
      'target_name': 'ts-atomic-deque',
      'type': 'static_library',
      'cflags': [ ],
      'sources': [
        'src/benchmark/std_glue/glue_ts_atomic_deque.cc'
      ],
    },
    {
      'target_name': 'ts-hardware-deque',
      'type': 'static_library',
      'cflags': [ ],
      'sources': [
        'src/benchmark/std_glue/glue_ts_hardware_deque.cc'
      ],
    },
    {
      'target_name': 'eb-stack',
      'type': 'static_library',
      'cflags': [ ],
      'sources': [
        'src/benchmark/std_glue/glue_eb_stack.cc'
      ],
    },
    {
      'target_name': 'wf-queue',
      'type': 'static_library',
      'cflags': [ ],
      'sources': [
        'src/benchmark/std_glue/glue_wf_ppopp12.cc'
      ],
    },
    {
      'target_name': 'lru-dds-ms',
      'type': 'static_library',
      'cflags': [ ],
      'sources': [
        'src/benchmark/std_glue/glue_lru_dds_ms.cc'
      ],
    },
    {
      'target_name': 'lru-dds-treiber-stack',
      'type': 'static_library',
      'cflags': [ ],
      'sources': [
        'src/benchmark/std_glue/glue_lru_dds_treiber_stack.cc'
      ],
    },
    {
      'target_name': 'sc-stack',
      'type': 'static_library',
      'cflags': [ ],
      'sources': [
        'src/benchmark/std_glue/glue_sc_stack.cc'
      ],
    },
    {
      'target_name': 'sc-distributed-stack-treiber',
      'type': 'static_library',
      'cflags': [ ],
      'sources': [
        'src/benchmark/std_glue/glue_sc_distributed_stack_treiber.cc'
      ],
    },
    {
      'target_name': 'sc-distributed-stack-ebstack',
      'type': 'static_library',
      'cflags': [ ],
      'sources': [
        'src/benchmark/std_glue/glue_sc_distributed_stack_ebstack.cc'
      ],
    },
    {
      'target_name': 'sc-distributed-ts-hardware-stack',
      'type': 'static_library',
      'cflags': [ ],
      'sources': [
        'src/benchmark/std_glue/glue_sc_distributed_ts_hardware_stack.cc'
      ],
    },
  ]
}
| 23.735941
| 73
| 0.517202
| 968
| 9,708
| 4.864669
| 0.072314
| 0.10618
| 0.180505
| 0.233595
| 0.898705
| 0.828414
| 0.746655
| 0.741983
| 0.741346
| 0.677001
| 0
| 0.00089
| 0.30583
| 9,708
| 408
| 74
| 23.794118
| 0.697878
| 0
| 0
| 0.504902
| 0
| 0
| 0.54862
| 0.272765
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
621344e2705b80726bfd2c700bcab87bed4a51ec
| 7,117
|
py
|
Python
|
aeiot/migrations/0002_auto_20160916_1625.py
|
ratkowskia/aeiot
|
e41a2e937facb13c63a0666a7aa1c957b68b6ab4
|
[
"BSD-3-Clause"
] | null | null | null |
aeiot/migrations/0002_auto_20160916_1625.py
|
ratkowskia/aeiot
|
e41a2e937facb13c63a0666a7aa1c957b68b6ab4
|
[
"BSD-3-Clause"
] | null | null | null |
aeiot/migrations/0002_auto_20160916_1625.py
|
ratkowskia/aeiot
|
e41a2e937facb13c63a0666a7aa1c957b68b6ab4
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-09-16 16:25
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated schema migration for the ``aeiot`` app (Django 1.9.6).

    Creates the base domain tables (Algorithm, AlgorithmExecution, Billing,
    BillingRecord, Consumer, DataCollection, DataFormat, Resource, Supplier),
    the multi-table-inheritance children (CPU/Storage extending Resource,
    DataSet/ResultSet extending DataCollection), and finally adds the
    foreign-key columns between them.

    NOTE: this file is generated migration state — field definitions, string
    literals and operation order must not be edited by hand; schema changes
    belong in the models plus a new migration.
    """

    dependencies = [
        ('aeiot', '0001_initial'),
    ]

    operations = [
        # --- base tables -------------------------------------------------
        migrations.CreateModel(
            name='Algorithm',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=200)),
                ('semantics', models.CharField(max_length=200)),
                ('source_code', models.CharField(max_length=200)),
                ('version', models.CharField(max_length=200)),
            ],
        ),
        migrations.CreateModel(
            name='AlgorithmExecution',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('algorithm', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='aeiot.Algorithm')),
            ],
        ),
        migrations.CreateModel(
            name='Billing',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=200)),
            ],
        ),
        migrations.CreateModel(
            name='BillingRecord',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('algorithm_execution', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='aeiot.AlgorithmExecution')),
                ('billing', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='aeiot.Billing')),
            ],
        ),
        migrations.CreateModel(
            name='Consumer',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=200)),
            ],
        ),
        migrations.CreateModel(
            name='DataCollection',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=200)),
            ],
        ),
        migrations.CreateModel(
            name='DataFormat',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=200)),
                ('semantics', models.CharField(max_length=200)),
            ],
        ),
        migrations.CreateModel(
            name='Resource',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=200)),
            ],
        ),
        migrations.CreateModel(
            name='Supplier',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=200)),
            ],
        ),
        # --- multi-table-inheritance children ----------------------------
        # Each *_ptr OneToOneField with parent_link=True is the implicit
        # link Django creates for concrete model inheritance.
        migrations.CreateModel(
            name='CPU',
            fields=[
                ('resource_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='aeiot.Resource')),
            ],
            bases=('aeiot.resource',),
        ),
        migrations.CreateModel(
            name='DataSet',
            fields=[
                ('datacollection_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='aeiot.DataCollection')),
                ('data_name', models.CharField(max_length=200)),
            ],
            bases=('aeiot.datacollection',),
        ),
        migrations.CreateModel(
            name='ResultSet',
            fields=[
                ('datacollection_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='aeiot.DataCollection')),
                ('result_name', models.CharField(max_length=200)),
            ],
            bases=('aeiot.datacollection',),
        ),
        migrations.CreateModel(
            name='Storage',
            fields=[
                ('resource_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='aeiot.Resource')),
            ],
            bases=('aeiot.resource',),
        ),
        # --- cross-model foreign keys ------------------------------------
        # Added after all CreateModel operations so every target table exists.
        migrations.AddField(
            model_name='billingrecord',
            name='resource',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='aeiot.Resource'),
        ),
        migrations.AddField(
            model_name='billing',
            name='consumer',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='aeiot.Consumer'),
        ),
        migrations.AddField(
            model_name='billing',
            name='supplier',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='aeiot.Supplier'),
        ),
        migrations.AddField(
            model_name='algorithmexecution',
            name='consumer',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='aeiot.Consumer'),
        ),
        migrations.AddField(
            model_name='algorithmexecution',
            name='resource',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='aeiot.Resource'),
        ),
        migrations.AddField(
            model_name='algorithm',
            name='input_format',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='alg_input_format', to='aeiot.DataFormat'),
        ),
        migrations.AddField(
            model_name='algorithm',
            name='output_format',
            # NOTE(review): related_name 'alg_otuput_format' looks like a typo
            # for 'alg_output_format', but it must stay as-is here — fixing it
            # requires a model change plus a follow-up migration.
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='alg_otuput_format', to='aeiot.DataFormat'),
        ),
        migrations.AddField(
            model_name='algorithm',
            name='supplier',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='aeiot.Supplier'),
        ),
        migrations.AddField(
            model_name='algorithmexecution',
            name='data_set',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='aeiot.DataSet'),
        ),
        migrations.AddField(
            model_name='algorithmexecution',
            name='result_set',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='aeiot.ResultSet'),
        ),
    ]
| 43.133333
| 205
| 0.582268
| 678
| 7,117
| 5.960177
| 0.126844
| 0.037614
| 0.062361
| 0.097996
| 0.862163
| 0.855481
| 0.828013
| 0.828013
| 0.808958
| 0.808958
| 0
| 0.011557
| 0.282703
| 7,117
| 164
| 206
| 43.396341
| 0.78002
| 0.009414
| 0
| 0.757962
| 1
| 0
| 0.132681
| 0.003406
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.019108
| 0
| 0.038217
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
624c04c92a0a6c4c49bf8846d9d66c53c232f18c
| 15,413
|
py
|
Python
|
tests/test_client_arm_disarm.py
|
DanielDidWhat/total-connect-client
|
4baab6855380521833fc4bedc11a3cb870ba90c9
|
[
"MIT"
] | null | null | null |
tests/test_client_arm_disarm.py
|
DanielDidWhat/total-connect-client
|
4baab6855380521833fc4bedc11a3cb870ba90c9
|
[
"MIT"
] | null | null | null |
tests/test_client_arm_disarm.py
|
DanielDidWhat/total-connect-client
|
4baab6855380521833fc4bedc11a3cb870ba90c9
|
[
"MIT"
] | null | null | null |
"""Test total_connect_client."""
from unittest.mock import patch
import unittest
import pytest
from total_connect_client.client import TotalConnectClient
from total_connect_client.exceptions import BadResultCodeError, AuthenticationError
from common import create_client
from const import (
LOCATION_INFO_BASIC_NORMAL,
RESPONSE_ARMED_AWAY,
RESPONSE_ARMED_STAY,
RESPONSE_ARMED_STAY_NIGHT,
RESPONSE_DISARMED,
RESPONSE_FEATURE_NOT_SUPPORTED,
)
# Dotted path of the method every test patches (via unittest.mock.patch with
# side_effect) so that no real HTTP request is ever made.
TCC_REQUEST_METHOD = "total_connect_client.client.TotalConnectClient.request"

# Canned API responses, one per result-code scenario exercised by the tests.
# Each mimics the {"ResultCode": ..., "ResultData": ...} shape the client parses.
RESPONSE_ARM_SUCCESS = {
    "ResultCode": TotalConnectClient.ARM_SUCCESS,
    "ResultData": "testing arm success",
}
RESPONSE_DISARM_SUCCESS = {
    "ResultCode": TotalConnectClient.DISARM_SUCCESS,
    "ResultData": "testing disarm success",
}
# returned when a zone is faulted
RESPONSE_ARM_FAILED = {
    "ResultCode": TotalConnectClient.COMMAND_FAILED,
    "ResultData": "testing arm failed",
}
RESPONSE_DISARM_FAILED = {
    "ResultCode": TotalConnectClient.COMMAND_FAILED,
    "ResultData": "testing disarm failed",
}
# appears to be for a bad/wrong code
RESPONSE_USER_CODE_INVALID = {
    "ResultCode": TotalConnectClient.USER_CODE_INVALID,
    "ResultData": "testing user code invalid",
}
# appears to be for a code entered for a wrong device/location
RESPONSE_USER_CODE_UNAVAILABLE = {
    "ResultCode": TotalConnectClient.USER_CODE_UNAVAILABLE,
    "ResultData": "testing user code unavailable",
}
RESPONSE_SUCCESS = {
    "ResultCode": TotalConnectClient.SUCCESS,
    "ResultData": "testing success",
}
class TestTotalConnectClient(unittest.TestCase):
    """Test TotalConnectClient arm/disarm flows.

    Each scenario patches TotalConnectClient.request with a side_effect list;
    the responses are consumed in order (one per client call), so the panel
    reads must happen inside the same `with patch` block as the arm/disarm
    call that precedes them.
    """

    def setUp(self):
        """Test setup."""
        self.client = None
        self.location_id = LOCATION_INFO_BASIC_NORMAL["LocationID"]

    def tearDown(self):
        """Test cleanup."""
        self.client = None

    def tests_arm_away(self):
        """Test arm away."""
        # first test with no issues
        self.client = create_client()
        responses = [RESPONSE_ARM_SUCCESS, RESPONSE_ARMED_AWAY]
        with patch(TCC_REQUEST_METHOD, side_effect=responses):
            self.client.arm_away(self.location_id)
            # confirm armed_away
            self.client.get_panel_meta_data(self.location_id)
            assert self.client.locations[self.location_id].arming_state.is_armed_away() is True
        # second test with a zone faulted
        self.client = create_client()
        responses = [RESPONSE_ARM_FAILED, RESPONSE_DISARMED]
        with patch(TCC_REQUEST_METHOD, side_effect=responses):
            with pytest.raises(BadResultCodeError):
                self.client.arm_away(self.location_id)
            # should still be disarmed
            self.client.get_panel_meta_data(self.location_id)
            assert self.client.locations[self.location_id].arming_state.is_armed_away() is False
            assert self.client.locations[self.location_id].arming_state.is_disarmed() is True
        # third test with bad usercode
        self.client = create_client()
        responses = [RESPONSE_USER_CODE_INVALID, RESPONSE_DISARMED]
        with patch(TCC_REQUEST_METHOD, side_effect=responses):
            with pytest.raises(BadResultCodeError):
                self.client.arm_away(self.location_id)
            # should still be disarmed
            self.client.get_panel_meta_data(self.location_id)
            assert self.client.locations[self.location_id].arming_state.is_armed_away() is False
            assert self.client.locations[self.location_id].arming_state.is_disarmed() is True
        # fourth test with 'unavailable' usercode
        # (USER_CODE_UNAVAILABLE maps to AuthenticationError, unlike the others)
        self.client = create_client()
        responses = [RESPONSE_USER_CODE_UNAVAILABLE, RESPONSE_DISARMED]
        with patch(TCC_REQUEST_METHOD, side_effect=responses):
            with pytest.raises(AuthenticationError):
                self.client.arm_away(self.location_id)
            # should still be disarmed
            self.client.get_panel_meta_data(self.location_id)
            assert self.client.locations[self.location_id].arming_state.is_armed_away() is False
            assert self.client.locations[self.location_id].arming_state.is_disarmed() is True
        # fifth test with 'other' usercode (an unexpected result code)
        self.client = create_client()
        responses = [RESPONSE_FEATURE_NOT_SUPPORTED, RESPONSE_DISARMED]
        with patch(TCC_REQUEST_METHOD, side_effect=responses):
            with pytest.raises(BadResultCodeError):
                self.client.arm_away(self.location_id)
            # should still be disarmed
            self.client.get_panel_meta_data(self.location_id)
            assert self.client.locations[self.location_id].arming_state.is_armed_away() is False
            assert self.client.locations[self.location_id].arming_state.is_disarmed() is True

    def tests_arm_away_instant(self):
        """Test arm away instant."""
        # first test with no issues
        self.client = create_client()
        responses = [RESPONSE_ARM_SUCCESS, RESPONSE_ARMED_AWAY]
        with patch(TCC_REQUEST_METHOD, side_effect=responses):
            self.client.arm_away_instant(self.location_id)
            # confirm armed_away
            self.client.get_panel_meta_data(self.location_id)
            assert self.client.locations[self.location_id].arming_state.is_armed_away() is True
        # second test with a zone faulted
        self.client = create_client()
        responses = [RESPONSE_ARM_FAILED, RESPONSE_DISARMED]
        with patch(TCC_REQUEST_METHOD, side_effect=responses):
            with pytest.raises(BadResultCodeError):
                self.client.arm_away_instant(self.location_id)
            # should still be disarmed
            self.client.get_panel_meta_data(self.location_id)
            assert self.client.locations[self.location_id].arming_state.is_armed_away() is False
            assert self.client.locations[self.location_id].arming_state.is_disarmed() is True
        # third test with bad usercode
        self.client = create_client()
        responses = [RESPONSE_USER_CODE_INVALID, RESPONSE_DISARMED]
        with patch(TCC_REQUEST_METHOD, side_effect=responses):
            with pytest.raises(BadResultCodeError):
                self.client.arm_away_instant(self.location_id)
            # should still be disarmed
            self.client.get_panel_meta_data(self.location_id)
            assert self.client.locations[self.location_id].arming_state.is_armed_away() is False
            assert self.client.locations[self.location_id].arming_state.is_disarmed() is True

    def tests_arm_stay(self):
        """Test arm stay."""
        # first test with no issues
        self.client = create_client()
        responses = [RESPONSE_ARM_SUCCESS, RESPONSE_ARMED_STAY]
        with patch(TCC_REQUEST_METHOD, side_effect=responses):
            self.client.arm_stay(self.location_id)
            # confirm armed_home (stay)
            self.client.get_panel_meta_data(self.location_id)
            assert self.client.locations[self.location_id].arming_state.is_armed_home() is True
        # second test with a zone faulted
        self.client = create_client()
        responses = [RESPONSE_ARM_FAILED, RESPONSE_DISARMED]
        with patch(TCC_REQUEST_METHOD, side_effect=responses):
            with pytest.raises(BadResultCodeError):
                self.client.arm_stay(self.location_id)
            # should still be disarmed
            self.client.get_panel_meta_data(self.location_id)
            assert self.client.locations[self.location_id].arming_state.is_armed_home() is False
            assert self.client.locations[self.location_id].arming_state.is_disarmed() is True
        # third test with bad usercode
        self.client = create_client()
        responses = [RESPONSE_USER_CODE_INVALID, RESPONSE_DISARMED]
        with patch(TCC_REQUEST_METHOD, side_effect=responses):
            with pytest.raises(BadResultCodeError):
                self.client.arm_stay(self.location_id)
            # should still be disarmed
            self.client.get_panel_meta_data(self.location_id)
            assert self.client.locations[self.location_id].arming_state.is_armed_home() is False
            assert self.client.locations[self.location_id].arming_state.is_disarmed() is True

    def tests_arm_stay_instant(self):
        """Test arm stay instant."""
        # first test with no issues
        self.client = create_client()
        responses = [RESPONSE_ARM_SUCCESS, RESPONSE_ARMED_STAY]
        with patch(TCC_REQUEST_METHOD, side_effect=responses):
            self.client.arm_stay_instant(self.location_id)
            # confirm armed_home (stay)
            self.client.get_panel_meta_data(self.location_id)
            assert self.client.locations[self.location_id].arming_state.is_armed_home() is True
        # second test with a zone faulted
        self.client = create_client()
        responses = [RESPONSE_ARM_FAILED, RESPONSE_DISARMED]
        with patch(TCC_REQUEST_METHOD, side_effect=responses):
            with pytest.raises(BadResultCodeError):
                self.client.arm_stay_instant(self.location_id)
            # should still be disarmed
            self.client.get_panel_meta_data(self.location_id)
            assert self.client.locations[self.location_id].arming_state.is_armed_home() is False
            assert self.client.locations[self.location_id].arming_state.is_disarmed() is True
        # third test with bad usercode
        self.client = create_client()
        responses = [RESPONSE_USER_CODE_INVALID, RESPONSE_DISARMED]
        with patch(TCC_REQUEST_METHOD, side_effect=responses):
            with pytest.raises(BadResultCodeError):
                self.client.arm_stay_instant(self.location_id)
            # should still be disarmed
            self.client.get_panel_meta_data(self.location_id)
            assert self.client.locations[self.location_id].arming_state.is_armed_home() is False
            assert self.client.locations[self.location_id].arming_state.is_disarmed() is True

    def tests_arm_stay_night(self):
        """Test arm stay night."""
        # first test with no issues
        self.client = create_client()
        responses = [RESPONSE_ARM_SUCCESS, RESPONSE_ARMED_STAY_NIGHT]
        with patch(TCC_REQUEST_METHOD, side_effect=responses):
            self.client.arm_stay_night(self.location_id)
            # confirm armed_night
            self.client.get_panel_meta_data(self.location_id)
            assert self.client.locations[self.location_id].arming_state.is_armed_night() is True
        # second test with a zone faulted
        self.client = create_client()
        responses = [RESPONSE_ARM_FAILED, RESPONSE_DISARMED]
        with patch(TCC_REQUEST_METHOD, side_effect=responses):
            with pytest.raises(BadResultCodeError):
                self.client.arm_stay_night(self.location_id)
            # should still be disarmed
            self.client.get_panel_meta_data(self.location_id)
            assert self.client.locations[self.location_id].arming_state.is_armed_night() is False
            assert self.client.locations[self.location_id].arming_state.is_disarmed() is True
        # third test with bad usercode
        self.client = create_client()
        responses = [RESPONSE_USER_CODE_INVALID, RESPONSE_DISARMED]
        with patch(TCC_REQUEST_METHOD, side_effect=responses):
            with pytest.raises(BadResultCodeError):
                self.client.arm_stay_night(self.location_id)
            # should still be disarmed
            self.client.get_panel_meta_data(self.location_id)
            assert self.client.locations[self.location_id].arming_state.is_armed_night() is False
            assert self.client.locations[self.location_id].arming_state.is_disarmed() is True

    def tests_disarm(self):
        """Test disarm."""
        # first test with no issues
        self.client = create_client()
        responses = [
            RESPONSE_ARM_SUCCESS,
            RESPONSE_ARMED_AWAY,
            RESPONSE_DISARM_SUCCESS,
            RESPONSE_DISARMED,
        ]
        with patch(TCC_REQUEST_METHOD, side_effect=responses):
            # arm the system and confirm armed_away
            self.client.arm_away(self.location_id)
            self.client.get_panel_meta_data(self.location_id)
            assert self.client.locations[self.location_id].arming_state.is_armed_away() is True
            # now disarm
            self.client.disarm(self.location_id)
            self.client.get_panel_meta_data(self.location_id)
            assert self.client.locations[self.location_id].arming_state.is_disarmed() is True

    def tests_disarm_command_failed(self):
        """Test disarm with command failed."""
        # first test with no issues
        self.client = create_client()
        responses = [
            RESPONSE_ARM_SUCCESS,
            RESPONSE_ARMED_AWAY,
            RESPONSE_DISARM_FAILED,
            RESPONSE_ARMED_AWAY,
        ]
        with patch(TCC_REQUEST_METHOD, side_effect=responses):
            # arm the system and confirm armed_away
            self.client.arm_away(self.location_id)
            self.client.get_panel_meta_data(self.location_id)
            assert self.client.locations[self.location_id].arming_state.is_armed_away() is True
            # now disarm, should fail
            with pytest.raises(BadResultCodeError):
                self.client.disarm(self.location_id)
            # should still be armed_away
            self.client.get_panel_meta_data(self.location_id)
            assert self.client.locations[self.location_id].arming_state.is_armed_away() is True

    def tests_disarm_user_code_invalid(self):
        """Test disarm with invalid user code."""
        # first test with no issues
        self.client = create_client()
        responses = [
            RESPONSE_ARM_SUCCESS,
            RESPONSE_ARMED_AWAY,
            RESPONSE_USER_CODE_INVALID,
            RESPONSE_ARMED_AWAY,
        ]
        with patch(TCC_REQUEST_METHOD, side_effect=responses):
            # arm the system and confirm armed_away
            self.client.arm_away(self.location_id)
            self.client.get_panel_meta_data(self.location_id)
            assert self.client.locations[self.location_id].arming_state.is_armed_away() is True
            with pytest.raises(BadResultCodeError):
                self.client.disarm(self.location_id)
            # should still be armed_away when disarming fails
            self.client.get_panel_meta_data(self.location_id)
            assert self.client.locations[self.location_id].arming_state.is_armed_away() is True

    def tests_disarm_disarmed(self):
        """Test attempt to disarm an already disarmed system."""
        # Did this once on my Lynx 7000 and it gave result code SUCCESS
        self.client = create_client()
        responses = [RESPONSE_DISARMED, RESPONSE_SUCCESS, RESPONSE_DISARMED]
        with patch(TCC_REQUEST_METHOD, side_effect=responses):
            self.client.get_panel_meta_data(self.location_id)
            assert self.client.locations[self.location_id].arming_state.is_disarmed() is True
            # now disarm
            self.client.disarm(self.location_id)
            self.client.get_panel_meta_data(self.location_id)
            assert self.client.locations[self.location_id].arming_state.is_disarmed() is True
| 43.173669
| 97
| 0.685914
| 1,883
| 15,413
| 5.324482
| 0.063728
| 0.108717
| 0.121484
| 0.09226
| 0.833633
| 0.829144
| 0.825254
| 0.8078
| 0.807301
| 0.801815
| 0
| 0.000339
| 0.234477
| 15,413
| 356
| 98
| 43.294944
| 0.849394
| 0.111205
| 0
| 0.728814
| 0
| 0
| 0.025985
| 0.003975
| 0
| 0
| 0
| 0
| 0.15678
| 1
| 0.04661
| false
| 0
| 0.029661
| 0
| 0.080508
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
626e6c09f08958d78e1a96c9b4eba4f98fb25e05
| 1,578
|
py
|
Python
|
PYTHON/Regex_and_Parsing/validating_uid.py
|
byung-u/HackerRank
|
4c02fefff7002b3af774b99ebf8d40f149f9d163
|
[
"MIT"
] | null | null | null |
PYTHON/Regex_and_Parsing/validating_uid.py
|
byung-u/HackerRank
|
4c02fefff7002b3af774b99ebf8d40f149f9d163
|
[
"MIT"
] | null | null | null |
PYTHON/Regex_and_Parsing/validating_uid.py
|
byung-u/HackerRank
|
4c02fefff7002b3af774b99ebf8d40f149f9d163
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
"""HackerRank "Validating UID": read N UIDs and print Valid/Invalid for each.

A valid UID is exactly 10 alphanumeric characters, contains at least two
uppercase letters, at least three digits, and no repeated character.

Fixes over the previous version: the old pattern used `[\\d\\w]{10}`, and
`\\w` matches '_' — so e.g. 'AB_CD12345' was wrongly accepted; the
no-repeat lookahead also only covered [a-zA-Z0-9], missing such cases.
The input loop is now guarded by __main__ so the module is importable.
"""
import re

# Compiled once; fully anchored so the whole string must satisfy every rule.
UID_RE = re.compile(
    r'^(?=(?:.*[A-Z]){2})'   # at least two uppercase letters
    r'(?=(?:.*\d){3})'       # at least three digits
    r'(?!.*(.).*\1)'         # no character appears twice
    r'[a-zA-Z0-9]{10}$'      # exactly 10 alphanumeric characters (no '_')
)


def is_valid_uid(uid):
    """Return True if *uid* satisfies all UID rules, else False."""
    return UID_RE.match(uid) is not None


if __name__ == '__main__':
    for _ in range(int(input())):
        print('Valid' if is_valid_uid(input()) else 'Invalid')
| 41.526316
| 107
| 0.308619
| 281
| 1,578
| 1.729537
| 0.128114
| 0.111111
| 0.296296
| 0.333333
| 0.81893
| 0.788066
| 0.771605
| 0.771605
| 0.753086
| 0.753086
| 0
| 0.071905
| 0.14512
| 1,578
| 37
| 108
| 42.648649
| 0.288362
| 0.54943
| 0
| 0
| 0
| 0.142857
| 0.284314
| 0.245098
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.142857
| 0
| 0.142857
| 0.285714
| 0
| 0
| 1
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
627714ba34912d7feedd931e114dc1933d5e6a1f
| 20,077
|
py
|
Python
|
userbot/plugins/feds.py
|
Subibhu/ZeBot
|
f78998ad1ea981abc8f195292dad206570695fb1
|
[
"Apache-2.0"
] | null | null | null |
userbot/plugins/feds.py
|
Subibhu/ZeBot
|
f78998ad1ea981abc8f195292dad206570695fb1
|
[
"Apache-2.0"
] | null | null | null |
userbot/plugins/feds.py
|
Subibhu/ZeBot
|
f78998ad1ea981abc8f195292dad206570695fb1
|
[
"Apache-2.0"
] | null | null | null |
from telethon import events
import random, re
from userbot.utils import admin_cmd
import asyncio
@borg.on(admin_cmd("fbanbisi ?(.*)"))
async def _(event):
    """Fban a target in every federation in the list.

    Joins each federation in turn and issues /fban for the user captured by
    the command pattern, talking to the fed-management bot chat. Replaces a
    ~190-line hand-unrolled joinfed/fban sequence with a data-driven loop;
    message text (including the trailing space after each fed id), ordering
    and per-message 1s pacing are unchanged.
    """
    if event.fwd_from:
        return
    await event.edit("Bringing Justice...")
    fban_target = event.pattern_match.group(1)
    chat = -1001390230877  # fed-management bot chat id — TODO confirm
    # Federation UUIDs, in the same order the original unrolled code used.
    fed_ids = [
        'a26a769d-616f-486f-b89f-fac0cb0a5c11',
        '88c4a4fe-4a8a-4880-87d5-d83554455918',
        '37708662-274e-4264-a61f-26ce50fcacda',
        'ca91a4e6-d6fa-4209-b8b1-6584d8f1dbf3',
        '8eb707da-3894-4611-8766-48d408324969',
        'a4bcd10a-8f14-4bf3-9226-13fb873a0316',
        '0b3371e6-0b20-4fb6-9285-6689aa9035aa',
        'b65416ba-6b72-487a-8cf9-0d9b857cd234',
        '54cef9da-112c-4d63-9fa0-dc0fb3ced3fd',
        '76f8fd60-1745-410a-975b-a021ce6da365',
        'abe54d90-d67a-4ce8-afbc-0cd827084ecf',
        '5fcbde53-88cb-40a9-916f-f04223476663',
        'c06a32d6-c9b9-4840-91e9-29b4de14ceff',
        'e1c0cf1f-220a-46bb-b508-46eb6b1ecf55',
        '8feb6ad8-1655-4dbc-9dc4-3d0546a3c1e1',
        '5d47a47c-9ee1-405b-bb33-d4cadfd0c102',
        'a3048abf-43d1-4291-a92e-822e883b2ae0',
        '924e05f0-2c93-4a81-a91a-a9cb4ff1993c',
        '0e32f678-9c5f-47d5-b512-a3a607f1fd99',
        '7389172d-b5ad-42bb-80af-5d0b50605390',
        '1f1cc20a-9f44-43ec-b2c0-9a4713b4fddb',
        'ca7a9f6e-3316-431c-b6d8-4bb929922bc4',
        'f0321efe-83d3-4dde-aef0-fdb36df5bc50',
        '5e9e0a69-15cb-4998-bbd0-4e782ba8b649',
        '52ad135b-a917-4d08-9d9d-d7adca31d4ac',
        'b71af67b-c39b-4606-b7ae-6de20880229a',
        'a2f1c742-f322-42d4-9966-7e2e0ad0ee70',
        'a9e59bba-5725-45a4-82f0-95412986f838',
        'e3e97adc-9e83-43a7-abf8-b2839ab4481f',
        'fc7d5b2b-fbde-47ad-b11a-8d6edaaeb69f',
        'dc8fccc6-5505-4337-afe8-26843bc899fa',
        '4591dedc-8b3b-4bfc-91a2-b13bfc7f2d78',
        '84a464bb-61f5-49e9-b8b7-90d321fe458e',
        '96bf9e56-cfcd-483d-afb4-c63dfcfc5dc8',
        'f665c8c9-9502-4e1b-9dc7-64337de00bfc',
        'c896db4f-4ea4-4251-95e0-defccdc2b7ef',
        '7d054dd3-b578-4fca-925d-55e87170732d',
        '3901c154-532b-4075-98b2-bbb889d614c9',
        '10b712f6-53c6-4e68-9224-b84b21b198d0',
        'b67015c6-a867-4dc2-839d-a03b13089c48',
        '26fedd5f-9666-4733-8f33-7b667b99abb6',
        '25f073df-1b7f-41ac-8f05-6489207e613a',
        '306e1ae6-ca72-4618-9474-046cbcb04fa2',
        '955a6ec4-2ca5-4f60-a1fe-9dc89efde33a',
        '955fa90c-0235-4df9-ac8e-95825d441e0a',
        'cc1fa3af-06d2-4aaa-8572-0d25bb7b7b51',
    ]
    # Only act when the captured argument is not itself a command/mention.
    if not event.text[0].isalpha() and event.text[0] not in ("/", "#", "@", "!"):
        await borg.send_message(chat, "/start")
        await asyncio.sleep(1)
        for fed_id in fed_ids:
            # Trailing space after the UUID is intentional (matches original).
            await borg.send_message(chat, f'/joinfed {fed_id} ')
            await asyncio.sleep(1)
            await borg.send_message(chat, f"/fban {fban_target}")
            await asyncio.sleep(1)
        # Original code had one extra 1s sleep, then a 10s settle delay.
        await asyncio.sleep(1)
        await asyncio.sleep(10)
        await event.edit("Justice Has Been Served")
@borg.on(admin_cmd("unfbanbisi ?(.*)"))
async def _(event):
    """Lift a federation ban for the given target across every federation.

    Joins each federation in ``fed_ids`` (one at a time, inside the
    fed-admin bot chat) and issues ``/unfban`` for the target supplied
    as the command argument.  A one-second pause between messages keeps
    the fed bot responsive and avoids Telegram flood limits.
    """
    if event.fwd_from:
        return
    await event.edit("Bringing Justice...")
    UNFBAN = event.pattern_match.group(1)
    chat = -1001390230877  # fed-admin bot chat where all commands are issued
    # Federation UUIDs to run the unban against, in the original order.
    fed_ids = [
        'a26a769d-616f-486f-b89f-fac0cb0a5c11',
        '88c4a4fe-4a8a-4880-87d5-d83554455918',
        '37708662-274e-4264-a61f-26ce50fcacda',
        'ca91a4e6-d6fa-4209-b8b1-6584d8f1dbf3',
        '8eb707da-3894-4611-8766-48d408324969',
        'a4bcd10a-8f14-4bf3-9226-13fb873a0316',
        '0b3371e6-0b20-4fb6-9285-6689aa9035aa',
        'b65416ba-6b72-487a-8cf9-0d9b857cd234',
        '54cef9da-112c-4d63-9fa0-dc0fb3ced3fd',
        '76f8fd60-1745-410a-975b-a021ce6da365',
        'abe54d90-d67a-4ce8-afbc-0cd827084ecf',
        '5fcbde53-88cb-40a9-916f-f04223476663',
        'c06a32d6-c9b9-4840-91e9-29b4de14ceff',
        'e1c0cf1f-220a-46bb-b508-46eb6b1ecf55',
        '8feb6ad8-1655-4dbc-9dc4-3d0546a3c1e1',
        '5d47a47c-9ee1-405b-bb33-d4cadfd0c102',
        'a3048abf-43d1-4291-a92e-822e883b2ae0',
        '924e05f0-2c93-4a81-a91a-a9cb4ff1993c',
        '0e32f678-9c5f-47d5-b512-a3a607f1fd99',
        '7389172d-b5ad-42bb-80af-5d0b50605390',
        '1f1cc20a-9f44-43ec-b2c0-9a4713b4fddb',
        'ca7a9f6e-3316-431c-b6d8-4bb929922bc4',
        'f0321efe-83d3-4dde-aef0-fdb36df5bc50',
        '5e9e0a69-15cb-4998-bbd0-4e782ba8b649',
        '52ad135b-a917-4d08-9d9d-d7adca31d4ac',
        'b71af67b-c39b-4606-b7ae-6de20880229a',
        'a2f1c742-f322-42d4-9966-7e2e0ad0ee70',
        'a9e59bba-5725-45a4-82f0-95412986f838',
        'e3e97adc-9e83-43a7-abf8-b2839ab4481f',
        'fc7d5b2b-fbde-47ad-b11a-8d6edaaeb69f',
        'dc8fccc6-5505-4337-afe8-26843bc899fa',
        '4591dedc-8b3b-4bfc-91a2-b13bfc7f2d78',
        '84a464bb-61f5-49e9-b8b7-90d321fe458e',
        '96bf9e56-cfcd-483d-afb4-c63dfcfc5dc8',
        'f665c8c9-9502-4e1b-9dc7-64337de00bfc',
        'c896db4f-4ea4-4251-95e0-defccdc2b7ef',
        '7d054dd3-b578-4fca-925d-55e87170732d',
        '3901c154-532b-4075-98b2-bbb889d614c9',
        '10b712f6-53c6-4e68-9224-b84b21b198d0',
        'b67015c6-a867-4dc2-839d-a03b13089c48',
        '26fedd5f-9666-4733-8f33-7b667b99abb6',
        '25f073df-1b7f-41ac-8f05-6489207e613a',
        '306e1ae6-ca72-4618-9474-046cbcb04fa2',
        '955a6ec4-2ca5-4f60-a1fe-9dc89efde33a',
        '955fa90c-0235-4df9-ac8e-95825d441e0a',
        'cc1fa3af-06d2-4aaa-8572-0d25bb7b7b51',
    ]
    # Only run for plain text that is not itself a command/mention/trigger.
    # NOTE(review): source indentation was lost in extraction; the sends are
    # assumed to sit inside this guard (the conventional plugin layout) —
    # confirm against the original file.
    if not event.text[0].isalpha() and event.text[0] not in ("/", "#", "@", "!"):
        await borg.send_message(chat, "/start")
        await asyncio.sleep(1)
        for fed_id in fed_ids:
            # Trailing space after the UUID is preserved from the original
            # commands; the fed bot tolerates it.
            await borg.send_message(chat, f"/joinfed {fed_id} ")
            await asyncio.sleep(1)
            await borg.send_message(chat, f"/unfban {UNFBAN}")
            await asyncio.sleep(1)
        # Give the fed bot time to process the final command.
        await asyncio.sleep(10)
    await event.edit("Justice Has Been Served")
| 49.942786
| 87
| 0.655676
| 2,623
| 20,077
| 4.944339
| 0.106367
| 0.174879
| 0.247745
| 0.259542
| 0.989436
| 0.989436
| 0.989436
| 0.989436
| 0.989436
| 0.989436
| 0
| 0.12846
| 0.217164
| 20,077
| 401
| 88
| 50.067332
| 0.696698
| 0
| 0
| 0.979849
| 0
| 0
| 0.281616
| 0.164965
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.010076
| 0
| 0.015113
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
65bbd691641ec210fe50ab685ca455f25c59d580
| 61,163
|
py
|
Python
|
octavia/tests/functional/api/v2/test_l7rule.py
|
zhangi/octavia
|
e68c851fecf55e1b5ffe7d5b849f729626af28a3
|
[
"Apache-2.0"
] | 129
|
2015-06-23T08:06:23.000Z
|
2022-03-31T12:38:20.000Z
|
octavia/tests/functional/api/v2/test_l7rule.py
|
zhangi/octavia
|
e68c851fecf55e1b5ffe7d5b849f729626af28a3
|
[
"Apache-2.0"
] | 6
|
2016-05-20T11:05:27.000Z
|
2021-03-23T06:05:52.000Z
|
octavia/tests/functional/api/v2/test_l7rule.py
|
zhangi/octavia
|
e68c851fecf55e1b5ffe7d5b849f729626af28a3
|
[
"Apache-2.0"
] | 166
|
2015-07-15T16:24:05.000Z
|
2022-03-02T20:54:36.000Z
|
# Copyright 2016 Blue Box, an IBM Company
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
from oslo_config import cfg
from oslo_config import fixture as oslo_fixture
from oslo_utils import uuidutils
from octavia.common import constants
import octavia.common.context
from octavia.common import data_models
from octavia.common import exceptions
from octavia.db import repositories
from octavia.tests.functional.api.v2 import base
class TestL7Rule(base.BaseAPITest):
    """Functional tests for the v2 L7 rule API endpoints."""

    # JSON root keys the API wraps single-object, list, and pagination-link
    # responses in; used throughout the tests to unwrap response bodies.
    root_tag = 'rule'
    root_tag_list = 'rules'
    root_tag_links = 'rules_links'
    def setUp(self):
        """Build the load balancer -> listener -> L7 policy chain under test.

        Each create call leaves the LB in a pending state, so the LB status
        is reset (base-class helper) before the next object is created.
        """
        super().setUp()
        self.lb = self.create_load_balancer(uuidutils.generate_uuid())
        self.lb_id = self.lb.get('loadbalancer').get('id')
        self.project_id = self.lb.get('loadbalancer').get('project_id')
        self.set_lb_status(self.lb_id)
        self.listener = self.create_listener(
            constants.PROTOCOL_HTTP, 80, lb_id=self.lb_id)
        self.listener_id = self.listener.get('listener').get('id')
        self.set_lb_status(self.lb_id)
        self.l7policy = self.create_l7policy(
            self.listener_id, constants.L7POLICY_ACTION_REJECT)
        self.l7policy_id = self.l7policy.get('l7policy').get('id')
        self.set_lb_status(self.lb_id)
        # URL templates for the rule collection and a single rule.
        self.l7rules_path = self.L7RULES_PATH.format(
            l7policy_id=self.l7policy_id)
        self.l7rule_path = self.l7rules_path + '/{l7rule_id}'
        self.l7policy_repo = repositories.L7PolicyRepository()
def test_get(self):
l7rule = self.create_l7rule(
self.l7policy_id, constants.L7RULE_TYPE_PATH,
constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
'/api', tags=['test_tag']).get(self.root_tag)
response = self.get(self.l7rule_path.format(
l7rule_id=l7rule.get('id'))).json.get(self.root_tag)
self.assertEqual(l7rule, response)
    def test_get_authorized(self):
        """A member of the owning project can GET the rule under real auth."""
        l7rule = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api').get(self.root_tag)
        # Temporarily switch the API into its auth-enforcing test strategy.
        self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
        auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
        self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               self.project_id):
            # Minimal keystone-style policy credentials for a project member.
            override_credentials = {
                'service_user_id': None,
                'user_domain_id': None,
                'is_admin_project': True,
                'service_project_domain_id': None,
                'service_project_id': None,
                'roles': ['load-balancer_member'],
                'user_id': None,
                'is_admin': False,
                'service_user_domain_id': None,
                'project_domain_id': None,
                'service_roles': [],
                'project_id': self.project_id}
            with mock.patch(
                    "oslo_context.context.RequestContext.to_policy_values",
                    return_value=override_credentials):
                response = self.get(self.l7rule_path.format(
                    l7rule_id=l7rule.get('id'))).json.get(self.root_tag)
        # Restore whatever auth strategy the config started with.
        self.conf.config(group='api_settings', auth_strategy=auth_strategy)
        self.assertEqual(l7rule, response)
    def test_get_not_authorized(self):
        """GET without member credentials is rejected with 403."""
        l7rule = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api').get(self.root_tag)
        # Enable auth enforcement; no policy-values override is installed,
        # so the request lacks the required role.
        self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
        auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
        self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               self.project_id):
            response = self.get(self.l7rule_path.format(
                l7rule_id=l7rule.get('id')), status=403).json
        self.conf.config(group='api_settings', auth_strategy=auth_strategy)
        self.assertEqual(self.NOT_AUTHORIZED_BODY, response)
    def test_get_deleted_gives_404(self):
        """A rule whose provisioning status is DELETED is hidden from GET."""
        api_l7rule = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api').get(self.root_tag)
        # Mark the rule deleted directly in the repository layer.
        self.set_object_status(self.l7rule_repo, api_l7rule.get('id'),
                               provisioning_status=constants.DELETED)
        self.get(self.l7rule_path.format(l7rule_id=api_l7rule.get('id')),
                 status=404)
def test_get_bad_parent_policy(self):
bad_path = (self.L7RULES_PATH.format(
lb_id=self.lb_id, listener_id=self.listener_id,
l7policy_id=uuidutils.generate_uuid()) + '/' +
uuidutils.generate_uuid())
self.get(bad_path, status=404)
def test_bad_get(self):
self.get(self.l7rule_path.format(
l7rule_id=uuidutils.generate_uuid()), status=404)
    def test_get_all(self):
        """Listing returns every rule on the policy, with its tags."""
        api_l7r_a = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api', tags=['test_tag1']).get(self.root_tag)
        self.set_lb_status(self.lb_id)
        api_l7r_b = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_COOKIE,
            constants.L7RULE_COMPARE_TYPE_CONTAINS, 'some-value',
            key='some-cookie', tags=['test_tag2']).get(self.root_tag)
        self.set_lb_status(self.lb_id)
        rules = self.get(self.l7rules_path).json.get(self.root_tag_list)
        self.assertIsInstance(rules, list)
        self.assertEqual(2, len(rules))
        # Compare on (id, type, tags) triples; list order is not guaranteed.
        rule_id_types = [(r.get('id'), r.get('type'),
                          r['tags']) for r in rules]
        self.assertIn((api_l7r_a.get('id'), api_l7r_a.get('type'),
                       api_l7r_a['tags']),
                      rule_id_types)
        self.assertIn((api_l7r_b.get('id'), api_l7r_b.get('type'),
                       api_l7r_b['tags']),
                      rule_id_types)
    def test_get_all_authorized(self):
        """A project member can list the policy's rules under real auth."""
        api_l7r_a = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        api_l7r_b = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_COOKIE,
            constants.L7RULE_COMPARE_TYPE_CONTAINS, 'some-value',
            key='some-cookie').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        # Enable auth enforcement for the duration of the GET.
        self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
        auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
        self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               self.project_id):
            # Member-role credentials scoped to the owning project.
            override_credentials = {
                'service_user_id': None,
                'user_domain_id': None,
                'is_admin_project': True,
                'service_project_domain_id': None,
                'service_project_id': None,
                'roles': ['load-balancer_member'],
                'user_id': None,
                'is_admin': False,
                'service_user_domain_id': None,
                'project_domain_id': None,
                'service_roles': [],
                'project_id': self.project_id}
            with mock.patch(
                    "oslo_context.context.RequestContext.to_policy_values",
                    return_value=override_credentials):
                rules = self.get(
                    self.l7rules_path).json.get(self.root_tag_list)
        self.conf.config(group='api_settings', auth_strategy=auth_strategy)
        self.assertIsInstance(rules, list)
        self.assertEqual(2, len(rules))
        rule_id_types = [(r.get('id'), r.get('type')) for r in rules]
        self.assertIn((api_l7r_a.get('id'), api_l7r_a.get('type')),
                      rule_id_types)
        self.assertIn((api_l7r_b.get('id'), api_l7r_b.get('type')),
                      rule_id_types)
    def test_get_all_unscoped_token(self):
        """A token with no project scope cannot list rules (403)."""
        self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_COOKIE,
            constants.L7RULE_COMPARE_TYPE_CONTAINS, 'some-value',
            key='some-cookie').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
        auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
        self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
        # project_id is None both on the context and in the credentials,
        # simulating an unscoped keystone token.
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               None):
            override_credentials = {
                'service_user_id': None,
                'user_domain_id': None,
                'is_admin_project': True,
                'service_project_domain_id': None,
                'service_project_id': None,
                'roles': ['load-balancer_member'],
                'user_id': None,
                'is_admin': False,
                'service_user_domain_id': None,
                'project_domain_id': None,
                'service_roles': [],
                'project_id': None}
            with mock.patch(
                    "oslo_context.context.RequestContext.to_policy_values",
                    return_value=override_credentials):
                result = self.get(self.l7rules_path, status=403).json
        self.conf.config(group='api_settings', auth_strategy=auth_strategy)
        self.assertEqual(self.NOT_AUTHORIZED_BODY, result)
    def test_get_all_not_authorized(self):
        """Listing without member credentials is rejected with 403."""
        self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_COOKIE,
            constants.L7RULE_COMPARE_TYPE_CONTAINS, 'some-value',
            key='some-cookie').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        # Auth enforcement on, but no policy-values override installed.
        self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
        auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
        self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               self.project_id):
            rules = self.get(self.l7rules_path, status=403)
        self.conf.config(group='api_settings', auth_strategy=auth_strategy)
        self.assertEqual(self.NOT_AUTHORIZED_BODY, rules.json)
    def test_get_all_sorted(self):
        """sort=type:desc returns exactly the reverse of sort=type:asc."""
        self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_COOKIE,
            constants.L7RULE_COMPARE_TYPE_CONTAINS, 'some-value',
            key='some-cookie').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_HOST_NAME,
            constants.L7RULE_COMPARE_TYPE_EQUAL_TO,
            'www.example.com').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        response = self.get(self.l7rules_path,
                            params={'sort': 'type:desc'})
        rules_desc = response.json.get(self.root_tag_list)
        response = self.get(self.l7rules_path,
                            params={'sort': 'type:asc'})
        rules_asc = response.json.get(self.root_tag_list)
        self.assertEqual(3, len(rules_desc))
        self.assertEqual(3, len(rules_asc))
        rule_id_types_desc = [(rule.get('id'), rule.get('type'))
                              for rule in rules_desc]
        rule_id_types_asc = [(rule.get('id'), rule.get('type'))
                             for rule in rules_asc]
        self.assertEqual(rule_id_types_asc,
                         list(reversed(rule_id_types_desc)))
    def test_get_all_limited(self):
        """Pagination: limit/marker produce correct next/previous links."""
        self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_COOKIE,
            constants.L7RULE_COMPARE_TYPE_CONTAINS, 'some-value',
            key='some-cookie').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_HOST_NAME,
            constants.L7RULE_COMPARE_TYPE_EQUAL_TO,
            'www.example.com').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        # First two -- should have 'next' link
        first_two = self.get(self.l7rules_path, params={'limit': 2}).json
        objs = first_two[self.root_tag_list]
        links = first_two[self.root_tag_links]
        self.assertEqual(2, len(objs))
        self.assertEqual(1, len(links))
        self.assertEqual('next', links[0]['rel'])
        # Third + off the end -- should have previous link
        third = self.get(self.l7rules_path, params={
            'limit': 2,
            'marker': first_two[self.root_tag_list][1]['id']}).json
        objs = third[self.root_tag_list]
        links = third[self.root_tag_links]
        self.assertEqual(1, len(objs))
        self.assertEqual(1, len(links))
        self.assertEqual('previous', links[0]['rel'])
        # Middle -- should have both links
        middle = self.get(self.l7rules_path, params={
            'limit': 1,
            'marker': first_two[self.root_tag_list][0]['id']}).json
        objs = middle[self.root_tag_list]
        links = middle[self.root_tag_links]
        self.assertEqual(1, len(objs))
        self.assertEqual(2, len(links))
        self.assertCountEqual(['previous', 'next'],
                              [link['rel'] for link in links])
    def test_get_all_fields_filter(self):
        """?fields= restricts every listed rule to the requested keys."""
        self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_COOKIE,
            constants.L7RULE_COMPARE_TYPE_CONTAINS, 'some-value',
            key='some-cookie').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_HOST_NAME,
            constants.L7RULE_COMPARE_TYPE_EQUAL_TO,
            'www.example.com').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        l7rus = self.get(self.l7rules_path, params={
            'fields': ['id', 'compare_type']}).json
        for l7ru in l7rus['rules']:
            self.assertIn(u'id', l7ru)
            self.assertIn(u'compare_type', l7ru)
            # Fields not requested must be omitted.
            self.assertNotIn(u'project_id', l7ru)
    def test_get_one_fields_filter(self):
        """?fields= on a single-rule GET restricts the returned keys."""
        l7r1 = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        l7ru = self.get(
            self.l7rule_path.format(l7rule_id=l7r1.get('id')),
            params={'fields': ['id', 'compare_type']}).json.get(self.root_tag)
        self.assertIn(u'id', l7ru)
        self.assertIn(u'compare_type', l7ru)
        # Fields not requested must be omitted.
        self.assertNotIn(u'project_id', l7ru)
    def test_get_all_filter(self):
        """Filtering the list by id returns only the matching rule."""
        ru1 = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_COOKIE,
            constants.L7RULE_COMPARE_TYPE_CONTAINS, 'some-value',
            key='some-cookie').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_HOST_NAME,
            constants.L7RULE_COMPARE_TYPE_EQUAL_TO,
            'www.example.com').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        l7rus = self.get(self.l7rules_path, params={
            'id': ru1['id']}).json
        self.assertEqual(1, len(l7rus['rules']))
        self.assertEqual(ru1['id'],
                         l7rus['rules'][0]['id'])
    def test_get_all_tags_filter(self):
        """Exercise tags / tags-any / not-tags / not-tags-any list filters."""
        rule1 = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api', tags=['test_tag1', 'test_tag2']).get(self.root_tag)
        self.set_lb_status(self.lb_id)
        rule2 = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_COOKIE,
            constants.L7RULE_COMPARE_TYPE_CONTAINS, 'some-value',
            key='some-cookie',
            tags=['test_tag2', 'test_tag3']).get(self.root_tag)
        self.set_lb_status(self.lb_id)
        rule3 = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_HOST_NAME,
            constants.L7RULE_COMPARE_TYPE_EQUAL_TO,
            'www.example.com',
            tags=['test_tag4', 'test_tag5']).get(self.root_tag)
        self.set_lb_status(self.lb_id)
        # tags=<one>: rules carrying that tag (AND semantics).
        rules = self.get(
            self.l7rules_path,
            params={'tags': 'test_tag2'}
        ).json.get(self.root_tag_list)
        self.assertIsInstance(rules, list)
        self.assertEqual(2, len(rules))
        self.assertEqual(
            [rule1.get('id'), rule2.get('id')],
            [rule.get('id') for rule in rules]
        )
        # tags=<list>: rules carrying ALL of the listed tags.
        rules = self.get(
            self.l7rules_path,
            params={'tags': ['test_tag2', 'test_tag3']}
        ).json.get(self.root_tag_list)
        self.assertIsInstance(rules, list)
        self.assertEqual(1, len(rules))
        self.assertEqual(
            [rule2.get('id')],
            [rule.get('id') for rule in rules]
        )
        # tags-any: rules carrying AT LEAST ONE of the tags.
        rules = self.get(
            self.l7rules_path,
            params={'tags-any': 'test_tag2'}
        ).json.get(self.root_tag_list)
        self.assertIsInstance(rules, list)
        self.assertEqual(2, len(rules))
        self.assertEqual(
            [rule1.get('id'), rule2.get('id')],
            [rule.get('id') for rule in rules]
        )
        # not-tags: rules NOT carrying the tag.
        rules = self.get(
            self.l7rules_path,
            params={'not-tags': 'test_tag2'}
        ).json.get(self.root_tag_list)
        self.assertIsInstance(rules, list)
        self.assertEqual(1, len(rules))
        self.assertEqual(
            [rule3.get('id')],
            [rule.get('id') for rule in rules]
        )
        # not-tags-any: rules carrying NONE of the listed tags.
        rules = self.get(
            self.l7rules_path,
            params={'not-tags-any': ['test_tag2', 'test_tag4']}
        ).json.get(self.root_tag_list)
        self.assertIsInstance(rules, list)
        self.assertEqual(0, len(rules))
        # Combined filters intersect.
        rules = self.get(
            self.l7rules_path,
            params={'tags': 'test_tag2',
                    'tags-any': ['test_tag1', 'test_tag3']}
        ).json.get(self.root_tag_list)
        self.assertIsInstance(rules, list)
        self.assertEqual(2, len(rules))
        self.assertEqual(
            [rule1.get('id'), rule2.get('id')],
            [rule.get('id') for rule in rules]
        )
        # Contradictory filters match nothing.
        rules = self.get(
            self.l7rules_path,
            params={'tags': 'test_tag2', 'not-tags': 'test_tag2'}
        ).json.get(self.root_tag_list)
        self.assertIsInstance(rules, list)
        self.assertEqual(0, len(rules))
def test_empty_get_all(self):
response = self.get(self.l7rules_path).json.get(self.root_tag_list)
self.assertIsInstance(response, list)
self.assertEqual(0, len(response))
    def test_get_all_hides_deleted(self):
        """Rules marked DELETED disappear from the list response."""
        api_l7rule = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api').get(self.root_tag)
        response = self.get(self.l7rules_path)
        objects = response.json.get(self.root_tag_list)
        self.assertEqual(len(objects), 1)
        # Mark the rule deleted directly in the repository layer.
        self.set_object_status(self.l7rule_repo, api_l7rule.get('id'),
                               provisioning_status=constants.DELETED)
        response = self.get(self.l7rules_path)
        objects = response.json.get(self.root_tag_list)
        self.assertEqual(len(objects), 0)
    def test_create_host_name_rule(self):
        """Create a HOST_NAME/EQUAL_TO rule; verify fields and cascade status."""
        api_l7rule = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_HOST_NAME,
            constants.L7RULE_COMPARE_TYPE_EQUAL_TO,
            'www.example.com').get(self.root_tag)
        self.assertEqual(constants.L7RULE_TYPE_HOST_NAME,
                         api_l7rule.get('type'))
        self.assertEqual(constants.L7RULE_COMPARE_TYPE_EQUAL_TO,
                         api_l7rule.get('compare_type'))
        self.assertEqual('www.example.com', api_l7rule.get('value'))
        self.assertIsNone(api_l7rule.get('key'))
        self.assertFalse(api_l7rule.get('invert'))
        # Creating a rule puts the whole ancestor chain into PENDING_UPDATE.
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            l7policy_id=self.l7policy_id, l7rule_id=api_l7rule.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            l7policy_prov_status=constants.PENDING_UPDATE,
            l7rule_prov_status=constants.PENDING_CREATE,
            l7rule_op_status=constants.OFFLINE)
    def test_create_rule_authorized(self):
        """A project member can create a rule under auth enforcement."""
        self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
        auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
        self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               self.project_id):
            # Member-role credentials scoped to the owning project.
            override_credentials = {
                'service_user_id': None,
                'user_domain_id': None,
                'is_admin_project': True,
                'service_project_domain_id': None,
                'service_project_id': None,
                'roles': ['load-balancer_member'],
                'user_id': None,
                'is_admin': False,
                'service_user_domain_id': None,
                'project_domain_id': None,
                'service_roles': [],
                'project_id': self.project_id}
            with mock.patch(
                    "oslo_context.context.RequestContext.to_policy_values",
                    return_value=override_credentials):
                api_l7rule = self.create_l7rule(
                    self.l7policy_id, constants.L7RULE_TYPE_HOST_NAME,
                    constants.L7RULE_COMPARE_TYPE_EQUAL_TO,
                    'www.example.com').get(self.root_tag)
        self.conf.config(group='api_settings', auth_strategy=auth_strategy)
        self.assertEqual(constants.L7RULE_TYPE_HOST_NAME,
                         api_l7rule.get('type'))
        self.assertEqual(constants.L7RULE_COMPARE_TYPE_EQUAL_TO,
                         api_l7rule.get('compare_type'))
        self.assertEqual('www.example.com', api_l7rule.get('value'))
        self.assertIsNone(api_l7rule.get('key'))
        self.assertFalse(api_l7rule.get('invert'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            l7policy_id=self.l7policy_id, l7rule_id=api_l7rule.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            l7policy_prov_status=constants.PENDING_UPDATE,
            l7rule_prov_status=constants.PENDING_CREATE,
            l7rule_op_status=constants.OFFLINE)
    def test_create_rule_not_authorized(self):
        """Create without member credentials is rejected with 403."""
        self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
        auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
        self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               self.project_id):
            api_l7rule = self.create_l7rule(
                self.l7policy_id, constants.L7RULE_TYPE_HOST_NAME,
                constants.L7RULE_COMPARE_TYPE_EQUAL_TO,
                'www.example.com', status=403)
        self.conf.config(group='api_settings', auth_strategy=auth_strategy)
        self.assertEqual(self.NOT_AUTHORIZED_BODY, api_l7rule)
    def test_create_l7policy_in_error(self):
        """Creating a rule on an ERROR'd (immutable) policy yields 409."""
        l7policy = self.create_l7policy(
            self.listener_id, constants.L7POLICY_ACTION_REJECT)
        l7policy_id = l7policy.get('l7policy').get('id')
        self.set_lb_status(self.lb_id)
        # Force the policy into ERROR so it is immutable.
        self.set_object_status(self.l7policy_repo, l7policy_id,
                               provisioning_status=constants.ERROR)
        api_l7rule = self.create_l7rule(
            l7policy_id, constants.L7RULE_TYPE_HOST_NAME,
            constants.L7RULE_COMPARE_TYPE_EQUAL_TO,
            'www.example.com', status=409)
        ref_msg = ('L7Policy %s is immutable and cannot be updated.' %
                   l7policy_id)
        self.assertEqual(ref_msg, api_l7rule.get('faultstring'))
    def test_create_path_rule(self):
        """Create an inverted PATH/STARTS_WITH rule; verify fields/status."""
        api_l7rule = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH, '/api',
            invert=True).get(self.root_tag)
        self.assertEqual(constants.L7RULE_TYPE_PATH, api_l7rule.get('type'))
        self.assertEqual(constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
                         api_l7rule.get('compare_type'))
        self.assertEqual('/api', api_l7rule.get('value'))
        self.assertIsNone(api_l7rule.get('key'))
        # invert=True must be reflected in the response.
        self.assertTrue(api_l7rule.get('invert'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            l7policy_id=self.l7policy_id, l7rule_id=api_l7rule.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            l7policy_prov_status=constants.PENDING_UPDATE,
            l7rule_prov_status=constants.PENDING_CREATE,
            l7rule_op_status=constants.OFFLINE)
    def test_create_file_type_rule(self):
        """Create a FILE_TYPE/REGEX rule; verify fields and cascade status."""
        api_l7rule = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_FILE_TYPE,
            constants.L7RULE_COMPARE_TYPE_REGEX, 'jpg|png').get(self.root_tag)
        self.assertEqual(constants.L7RULE_TYPE_FILE_TYPE,
                         api_l7rule.get('type'))
        self.assertEqual(constants.L7RULE_COMPARE_TYPE_REGEX,
                         api_l7rule.get('compare_type'))
        self.assertEqual('jpg|png', api_l7rule.get('value'))
        self.assertIsNone(api_l7rule.get('key'))
        self.assertFalse(api_l7rule.get('invert'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            l7policy_id=self.l7policy_id, l7rule_id=api_l7rule.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            l7policy_prov_status=constants.PENDING_UPDATE,
            l7rule_prov_status=constants.PENDING_CREATE,
            l7rule_op_status=constants.OFFLINE)
    def test_create_header_rule(self):
        """Create a HEADER rule (key + quoted value) and verify the echo."""
        api_l7rule = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_HEADER,
            constants.L7RULE_COMPARE_TYPE_ENDS_WITH, '"some string"',
            key='Some-header').get(self.root_tag)
        self.assertEqual(constants.L7RULE_TYPE_HEADER, api_l7rule.get('type'))
        self.assertEqual(constants.L7RULE_COMPARE_TYPE_ENDS_WITH,
                         api_l7rule.get('compare_type'))
        self.assertEqual('"some string"', api_l7rule.get('value'))
        # HEADER rules require a key naming the header to match.
        self.assertEqual('Some-header', api_l7rule.get('key'))
        self.assertFalse(api_l7rule.get('invert'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            l7policy_id=self.l7policy_id, l7rule_id=api_l7rule.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            l7policy_prov_status=constants.PENDING_UPDATE,
            l7rule_prov_status=constants.PENDING_CREATE,
            l7rule_op_status=constants.OFFLINE)
    def test_create_cookie_rule(self):
        """Create a COOKIE rule (key + value) and verify the echo."""
        api_l7rule = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_COOKIE,
            constants.L7RULE_COMPARE_TYPE_CONTAINS, 'some-value',
            key='some-cookie').get(self.root_tag)
        self.assertEqual(constants.L7RULE_TYPE_COOKIE, api_l7rule.get('type'))
        self.assertEqual(constants.L7RULE_COMPARE_TYPE_CONTAINS,
                         api_l7rule.get('compare_type'))
        self.assertEqual('some-value', api_l7rule.get('value'))
        # COOKIE rules require a key naming the cookie to match.
        self.assertEqual('some-cookie', api_l7rule.get('key'))
        self.assertFalse(api_l7rule.get('invert'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            l7policy_id=self.l7policy_id, l7rule_id=api_l7rule.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            l7policy_prov_status=constants.PENDING_UPDATE,
            l7rule_prov_status=constants.PENDING_CREATE,
            l7rule_op_status=constants.OFFLINE)
    @mock.patch('octavia.common.constants.MAX_L7RULES_PER_L7POLICY', new=2)
    def test_create_too_many_rules(self):
        """Exceeding MAX_L7RULES_PER_L7POLICY (patched to 2) yields 409."""
        # Fill the policy up to the (patched) per-policy rule limit.
        for i in range(0, constants.MAX_L7RULES_PER_L7POLICY):
            self.create_l7rule(
                self.l7policy_id, constants.L7RULE_TYPE_PATH,
                constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
                '/api').get(self.root_tag)
            # Return the LB to ACTIVE so the next create is accepted.
            self.set_lb_status(self.lb_id)
        body = {'type': constants.L7RULE_TYPE_PATH,
                'compare_type': constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
                'value': '/api'}
        # One more rule than the limit must be rejected as a conflict.
        self.post(self.l7rules_path, self._build_body(body), status=409)
def test_bad_create(self):
l7rule = {'name': 'test1'}
self.post(self.l7rules_path, self._build_body(l7rule), status=400)
    def test_bad_create_host_name_rule(self):
        """A HOST_NAME rule without a 'value' field is rejected (400)."""
        l7rule = {'type': constants.L7RULE_TYPE_HOST_NAME,
                  'compare_type': constants.L7RULE_COMPARE_TYPE_STARTS_WITH}
        self.post(self.l7rules_path, self._build_body(l7rule), status=400)
def test_bad_create_path_rule(self):
l7rule = {'type': constants.L7RULE_TYPE_PATH,
'compare_type': constants.L7RULE_COMPARE_TYPE_REGEX,
'value': 'bad string\\'}
self.post(self.l7rules_path, self._build_body(l7rule), status=400)
    def test_bad_create_file_type_rule(self):
        """A FILE_TYPE rule with an unsupported compare_type is rejected.

        NOTE(review): STARTS_WITH appears to be invalid for FILE_TYPE rules
        (the valid-compare check lives in the API validation layer) — the
        request must fail with 400.
        """
        l7rule = {'type': constants.L7RULE_TYPE_FILE_TYPE,
                  'compare_type': constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
                  'value': 'png'}
        self.post(self.l7rules_path, self._build_body(l7rule), status=400)
    def test_bad_create_header_rule(self):
        """A HEADER rule missing the required 'key' field is rejected (400)."""
        l7rule = {'type': constants.L7RULE_TYPE_HEADER,
                  'compare_type': constants.L7RULE_COMPARE_TYPE_CONTAINS,
                  'value': 'some-string'}
        self.post(self.l7rules_path, self._build_body(l7rule), status=400)
    def test_bad_create_cookie_rule(self):
        """A COOKIE rule with an invalid cookie name is rejected (400).

        NOTE(review): the key contains spaces, which presumably violates
        cookie-name validation — confirm against the validation code.
        """
        l7rule = {'type': constants.L7RULE_TYPE_COOKIE,
                  'compare_type': constants.L7RULE_COMPARE_TYPE_EQUAL_TO,
                  'key': 'bad cookie name',
                  'value': 'some-string'}
        self.post(self.l7rules_path, self._build_body(l7rule), status=400)
    @mock.patch('octavia.api.drivers.utils.call_provider')
    def test_create_with_bad_provider(self, mock_provider):
        """A provider-driver failure during create surfaces as a 500
        whose faultstring names the provider and its error message."""
        mock_provider.side_effect = exceptions.ProviderDriverError(
            prov='bad_driver', user_msg='broken')
        l7rule = {'compare_type': 'REGEX',
                  'invert': False,
                  'type': 'PATH',
                  'value': '/images*',
                  'admin_state_up': True}
        response = self.post(self.l7rules_path, self._build_body(l7rule),
                             status=500)
        self.assertIn('Provider \'bad_driver\' reports error: broken',
                      response.json.get('faultstring'))
    def test_create_with_ssl_rule_types(self):
        """Create one valid rule of each SSL_* rule type and verify it.

        Covers SSL_CONN_HAS_CERT (boolean-true string value),
        SSL_VERIFY_RESULT (non-negative int string) and SSL_DN_FIELD
        (distinguished-name key + value).
        """
        test_mapping = {
            constants.L7RULE_TYPE_SSL_CONN_HAS_CERT: {
                'value': 'tRuE',
                'compare_type': constants.L7RULE_COMPARE_TYPE_EQUAL_TO},
            constants.L7RULE_TYPE_SSL_VERIFY_RESULT: {
                'value': '0',
                'compare_type': constants.L7RULE_COMPARE_TYPE_EQUAL_TO},
            constants.L7RULE_TYPE_SSL_DN_FIELD: {
                'key': 'st-1', 'value': 'ST-FIELD1-PREFIX',
                'compare_type': constants.L7RULE_COMPARE_TYPE_STARTS_WITH}
        }
        for l7rule_type, test_body in test_mapping.items():
            # Reset the LB to ACTIVE so each iteration's create succeeds.
            self.set_lb_status(self.lb_id)
            test_body.update({'type': l7rule_type})
            api_l7rule = self.create_l7rule(
                self.l7policy_id, l7rule_type,
                test_body['compare_type'], test_body['value'],
                key=test_body.get('key')).get(self.root_tag)
            self.assertEqual(l7rule_type, api_l7rule.get('type'))
            self.assertEqual(test_body['compare_type'],
                             api_l7rule.get('compare_type'))
            self.assertEqual(test_body['value'], api_l7rule.get('value'))
            # Only SSL_DN_FIELD carries a key.
            if test_body.get('key'):
                self.assertEqual(test_body['key'], api_l7rule.get('key'))
            self.assertFalse(api_l7rule.get('invert'))
            self.assert_correct_status(
                lb_id=self.lb_id, listener_id=self.listener_id,
                l7policy_id=self.l7policy_id, l7rule_id=api_l7rule.get('id'),
                lb_prov_status=constants.PENDING_UPDATE,
                listener_prov_status=constants.PENDING_UPDATE,
                l7policy_prov_status=constants.PENDING_UPDATE,
                l7rule_prov_status=constants.PENDING_CREATE,
                l7rule_op_status=constants.OFFLINE)
    def _test_bad_cases_with_ssl_rule_types(self, is_create=True,
                                            rule_id=None):
        """Shared helper exercising invalid SSL_* rule payloads.

        :param is_create: when True, send POSTs to the collection path;
            when False, send PUTs to the rule identified by ``rule_id``.
        :param rule_id: existing rule id to target for the update case.

        Each request must fail with 400 and a faultstring describing the
        specific validation error.
        """
        if is_create:
            req_func = self.post
            first_req_arg = self.l7rules_path
        else:
            req_func = self.put
            first_req_arg = self.l7rule_path.format(l7rule_id=rule_id)
        # test bad cases of L7RULE_TYPE_SSL_CONN_HAS_CERT
        # 1) a key is supplied but this type does not use one
        l7rule = {'compare_type': constants.L7RULE_COMPARE_TYPE_EQUAL_TO,
                  'invert': False,
                  'type': constants.L7RULE_TYPE_SSL_CONN_HAS_CERT,
                  'value': 'true',
                  'admin_state_up': True,
                  'key': 'no-need-key'}
        response = req_func(first_req_arg, self._build_body(l7rule),
                            status=400).json
        self.assertIn('L7rule type {0} does not use the "key" field.'.format(
            constants.L7RULE_TYPE_SSL_CONN_HAS_CERT),
            response.get('faultstring'))
        # 2) the value is not a boolean-True string
        l7rule.pop('key')
        l7rule['value'] = 'not-true-string'
        response = req_func(first_req_arg, self._build_body(l7rule),
                            status=400).json
        self.assertIn(
            'L7rule value {0} is not a boolean True string.'.format(
                l7rule['value']), response.get('faultstring'))
        # 3) only EQUAL_TO compare is allowed for this type
        l7rule['value'] = 'tRUe'
        l7rule['compare_type'] = constants.L7RULE_COMPARE_TYPE_STARTS_WITH
        response = req_func(first_req_arg, self._build_body(l7rule),
                            status=400).json
        self.assertIn(
            'L7rule type {0} only supports the {1} compare type.'.format(
                constants.L7RULE_TYPE_SSL_CONN_HAS_CERT,
                constants.L7RULE_COMPARE_TYPE_EQUAL_TO),
            response.get('faultstring'))
        # test bad cases of L7RULE_TYPE_SSL_VERIFY_RES
        # 1) key supplied but unused; 2) value not a non-negative int;
        # 3) compare type other than EQUAL_TO
        l7rule = {'compare_type': constants.L7RULE_COMPARE_TYPE_EQUAL_TO,
                  'invert': False,
                  'type': constants.L7RULE_TYPE_SSL_VERIFY_RESULT,
                  'value': 'true',
                  'admin_state_up': True,
                  'key': 'no-need-key'}
        response = req_func(first_req_arg, self._build_body(l7rule),
                            status=400).json
        self.assertIn(
            'L7rule type {0} does not use the "key" field.'.format(
                l7rule['type']), response.get('faultstring'))
        l7rule.pop('key')
        response = req_func(first_req_arg, self._build_body(l7rule),
                            status=400).json
        self.assertIn(
            'L7rule type {0} needs a int value, which is >= 0'.format(
                l7rule['type']), response.get('faultstring'))
        l7rule['value'] = '0'
        l7rule['compare_type'] = constants.L7RULE_COMPARE_TYPE_STARTS_WITH
        response = req_func(first_req_arg, self._build_body(l7rule),
                            status=400).json
        self.assertIn(
            'L7rule type {0} only supports the {1} compare type.'.format(
                l7rule['type'], constants.L7RULE_COMPARE_TYPE_EQUAL_TO),
            response.get('faultstring'))
        # test bad cases of L7RULE_TYPE_SSL_DN_FIELD
        # 1) unparseable regex value; 2) missing key; 3) invalid DN field
        l7rule = {'compare_type': constants.L7RULE_COMPARE_TYPE_REGEX,
                  'invert': False,
                  'type': constants.L7RULE_TYPE_SSL_DN_FIELD,
                  'value': 'bad regex\\',
                  'admin_state_up': True}
        # This case just test that fail to parse the regex from the value
        req_func(first_req_arg, self._build_body(l7rule), status=400).json
        l7rule['value'] = '^.test*$'
        response = req_func(first_req_arg, self._build_body(l7rule),
                            status=400).json
        self.assertIn(
            'L7rule type {0} needs to specify a key and a value.'.format(
                l7rule['type']), response.get('faultstring'))
        l7rule['key'] = 'NOT_SUPPORTED_DN_FIELD'
        response = req_func(first_req_arg, self._build_body(l7rule),
                            status=400).json
        self.assertIn('Invalid L7rule distinguished name field.',
                      response.get('faultstring'))
    def test_create_bad_cases_with_ssl_rule_types(self):
        """Run the shared SSL_* bad-payload cases through the create path."""
        self._test_bad_cases_with_ssl_rule_types()
def test_create_over_quota(self):
self.start_quota_mock(data_models.L7Rule)
l7rule = {'compare_type': 'REGEX',
'invert': False,
'type': 'PATH',
'value': '/images*',
'admin_state_up': True}
self.post(self.l7rules_path, self._build_body(l7rule), status=403)
    def test_update(self):
        """Update a rule's value and tags; statuses go PENDING_UPDATE."""
        api_l7rule = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api', tags=['old_tag']).get(self.root_tag)
        # Return the LB to ACTIVE so the PUT is accepted.
        self.set_lb_status(self.lb_id)
        new_l7rule = {'value': '/images', 'tags': ['new_tag']}
        response = self.put(self.l7rule_path.format(
            l7rule_id=api_l7rule.get('id')),
            self._build_body(new_l7rule)).json.get(self.root_tag)
        self.assertEqual('/images', response.get('value'))
        # Tags are replaced wholesale, not merged.
        self.assertEqual(['new_tag'], response['tags'])
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            l7policy_id=self.l7policy_id, l7rule_id=api_l7rule.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            l7policy_prov_status=constants.PENDING_UPDATE,
            l7rule_prov_status=constants.PENDING_UPDATE)
    def test_update_authorized(self):
        """An authorized member of the owning project may update a rule.

        Switches auth_strategy to TESTING and injects member credentials
        for the rule's project via the policy-values override.
        """
        api_l7rule = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        new_l7rule = {'value': '/images'}
        self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
        # Remember the original strategy so it can be restored afterwards.
        auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
        self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               self.project_id):
            # Non-admin member credentials scoped to this project.
            override_credentials = {
                'service_user_id': None,
                'user_domain_id': None,
                'is_admin_project': True,
                'service_project_domain_id': None,
                'service_project_id': None,
                'roles': ['load-balancer_member'],
                'user_id': None,
                'is_admin': False,
                'service_user_domain_id': None,
                'project_domain_id': None,
                'service_roles': [],
                'project_id': self.project_id}
            with mock.patch(
                    "oslo_context.context.RequestContext.to_policy_values",
                    return_value=override_credentials):
                response = self.put(self.l7rule_path.format(
                    l7rule_id=api_l7rule.get('id')),
                    self._build_body(new_l7rule)).json.get(self.root_tag)
        self.conf.config(group='api_settings', auth_strategy=auth_strategy)
        self.assertEqual('/images', response.get('value'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            l7policy_id=self.l7policy_id, l7rule_id=api_l7rule.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            l7policy_prov_status=constants.PENDING_UPDATE,
            l7rule_prov_status=constants.PENDING_UPDATE)
    def test_update_not_authorized(self):
        """Without member credentials the update is denied (403) and no
        provisioning status changes occur."""
        api_l7rule = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        new_l7rule = {'value': '/images'}
        self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
        auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
        self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
        # Only the project_id is patched here — no role override, so the
        # policy check fails.
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               self.project_id):
            response = self.put(self.l7rule_path.format(
                l7rule_id=api_l7rule.get('id')),
                self._build_body(new_l7rule), status=403)
        self.conf.config(group='api_settings', auth_strategy=auth_strategy)
        self.assertEqual(self.NOT_AUTHORIZED_BODY, response.json)
        # Everything stays ACTIVE: the denied request must not mutate state.
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            l7policy_id=self.l7policy_id, l7rule_id=api_l7rule.get('id'),
            lb_prov_status=constants.ACTIVE,
            listener_prov_status=constants.ACTIVE,
            l7policy_prov_status=constants.ACTIVE,
            l7rule_prov_status=constants.ACTIVE)
    def test_bad_update(self):
        """Updating a rule with an unknown type value is rejected (400)."""
        l7rule = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api').get(self.root_tag)
        new_l7rule = {'type': 'bad type'}
        self.put(self.l7rule_path.format(l7rule_id=l7rule.get('id')),
                 self._build_body(new_l7rule), status=400)
    @mock.patch('octavia.api.drivers.utils.call_provider')
    def test_update_with_bad_provider(self, mock_provider):
        """A provider-driver failure during update surfaces as a 500
        whose faultstring names the provider and its error message."""
        api_l7rule = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        new_l7rule = {'value': '/images'}
        mock_provider.side_effect = exceptions.ProviderDriverError(
            prov='bad_driver', user_msg='broken')
        response = self.put(
            self.l7rule_path.format(l7rule_id=api_l7rule.get('id')),
            self._build_body(new_l7rule), status=500)
        self.assertIn('Provider \'bad_driver\' reports error: broken',
                      response.json.get('faultstring'))
    def test_update_with_invalid_rule(self):
        """An update with an uncompilable REGEX value is rejected (400)
        and the rule's provisioning status is untouched."""
        api_l7rule = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        new_l7rule = {'compare_type': constants.L7RULE_COMPARE_TYPE_REGEX,
                      'value': 'bad string\\'}
        self.put(self.l7rule_path.format(
            l7rule_id=api_l7rule.get('id')), self._build_body(new_l7rule),
            status=400)
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            l7policy_id=self.l7policy_id, l7rule_id=api_l7rule.get('id'),
            l7rule_prov_status=constants.ACTIVE)
    def test_update_with_ssl_rule_types(self):
        """Convert a fresh PATH rule into each valid SSL_* rule type
        via PUT and verify the updated representation."""
        test_mapping = {
            constants.L7RULE_TYPE_SSL_CONN_HAS_CERT: {
                'value': 'tRuE',
                'compare_type': constants.L7RULE_COMPARE_TYPE_EQUAL_TO},
            constants.L7RULE_TYPE_SSL_VERIFY_RESULT: {
                'value': '0',
                'compare_type': constants.L7RULE_COMPARE_TYPE_EQUAL_TO},
            constants.L7RULE_TYPE_SSL_DN_FIELD: {
                'key': 'st-1', 'value': 'ST-FIELD1-PREFIX',
                'compare_type': constants.L7RULE_COMPARE_TYPE_STARTS_WITH}
        }
        for l7rule_type, test_body in test_mapping.items():
            # ACTIVE is required both before the create and before the PUT.
            self.set_lb_status(self.lb_id)
            api_l7rule = self.create_l7rule(
                self.l7policy_id, constants.L7RULE_TYPE_PATH,
                constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
                '/api').get(self.root_tag)
            self.set_lb_status(self.lb_id)
            test_body.update({'type': l7rule_type})
            response = self.put(self.l7rule_path.format(
                l7rule_id=api_l7rule.get('id')),
                self._build_body(test_body)).json.get(self.root_tag)
            self.assertEqual(l7rule_type, response.get('type'))
            self.assertEqual(test_body['compare_type'],
                             response.get('compare_type'))
            self.assertEqual(test_body['value'], response.get('value'))
            # Only SSL_DN_FIELD carries a key.
            if test_body.get('key'):
                self.assertEqual(test_body['key'], response.get('key'))
            self.assertFalse(response.get('invert'))
            self.assert_correct_status(
                lb_id=self.lb_id, listener_id=self.listener_id,
                l7policy_id=self.l7policy_id, l7rule_id=response.get('id'),
                lb_prov_status=constants.PENDING_UPDATE,
                listener_prov_status=constants.PENDING_UPDATE,
                l7policy_prov_status=constants.PENDING_UPDATE,
                l7rule_prov_status=constants.PENDING_UPDATE)
    def test_update_bad_cases_with_ssl_rule_types(self):
        """Run the shared SSL_* bad-payload cases through the update path."""
        api_l7rule = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api').get(self.root_tag)
        self._test_bad_cases_with_ssl_rule_types(
            is_create=False, rule_id=api_l7rule.get('id'))
    def test_update_invert_none(self):
        """Updating invert to None resets it to the default (False)."""
        api_l7rule = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api', tags=['old_tag'], invert=True).get(self.root_tag)
        self.set_lb_status(self.lb_id)
        new_l7rule = {'invert': None}
        response = self.put(self.l7rule_path.format(
            l7rule_id=api_l7rule.get('id')),
            self._build_body(new_l7rule)).json.get(self.root_tag)
        self.assertFalse(response.get('invert'))
    def test_delete(self):
        """Delete a rule; it and its ancestors transition to PENDING_*."""
        api_l7rule = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        # Set status to ACTIVE/ONLINE because set_lb_status did it in the db
        api_l7rule['provisioning_status'] = constants.ACTIVE
        api_l7rule['operating_status'] = constants.ONLINE
        # updated_at changed when set_lb_status touched the row, so drop it
        # from both sides before the equality check.
        api_l7rule.pop('updated_at')
        response = self.get(self.l7rule_path.format(
            l7rule_id=api_l7rule.get('id'))).json.get(self.root_tag)
        response.pop('updated_at')
        self.assertEqual(api_l7rule, response)
        self.delete(self.l7rule_path.format(l7rule_id=api_l7rule.get('id')))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            l7policy_id=self.l7policy_id, l7rule_id=api_l7rule.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            l7policy_prov_status=constants.PENDING_UPDATE,
            l7rule_prov_status=constants.PENDING_DELETE)
        self.set_lb_status(self.lb_id)
    def test_delete_authorized(self):
        """An authorized member of the owning project may delete a rule."""
        api_l7rule = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        # Set status to ACTIVE/ONLINE because set_lb_status did it in the db
        api_l7rule['provisioning_status'] = constants.ACTIVE
        api_l7rule['operating_status'] = constants.ONLINE
        api_l7rule.pop('updated_at')
        response = self.get(self.l7rule_path.format(
            l7rule_id=api_l7rule.get('id'))).json.get(self.root_tag)
        response.pop('updated_at')
        self.assertEqual(api_l7rule, response)
        self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
        auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
        self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               self.project_id):
            # Non-admin member credentials scoped to this project.
            override_credentials = {
                'service_user_id': None,
                'user_domain_id': None,
                'is_admin_project': True,
                'service_project_domain_id': None,
                'service_project_id': None,
                'roles': ['load-balancer_member'],
                'user_id': None,
                'is_admin': False,
                'service_user_domain_id': None,
                'project_domain_id': None,
                'service_roles': [],
                'project_id': self.project_id}
            with mock.patch(
                    "oslo_context.context.RequestContext.to_policy_values",
                    return_value=override_credentials):
                self.delete(
                    self.l7rule_path.format(l7rule_id=api_l7rule.get('id')))
        self.conf.config(group='api_settings', auth_strategy=auth_strategy)
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            l7policy_id=self.l7policy_id, l7rule_id=api_l7rule.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            l7policy_prov_status=constants.PENDING_UPDATE,
            l7rule_prov_status=constants.PENDING_DELETE)
        self.set_lb_status(self.lb_id)
    def test_delete_not_authorized(self):
        """Without member credentials the delete is denied (403) and all
        provisioning statuses remain ACTIVE."""
        api_l7rule = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        # Set status to ACTIVE/ONLINE because set_lb_status did it in the db
        api_l7rule['provisioning_status'] = constants.ACTIVE
        api_l7rule['operating_status'] = constants.ONLINE
        api_l7rule.pop('updated_at')
        response = self.get(self.l7rule_path.format(
            l7rule_id=api_l7rule.get('id'))).json.get(self.root_tag)
        response.pop('updated_at')
        self.assertEqual(api_l7rule, response)
        self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
        auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
        self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
        # No role override: the policy check must fail.
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               self.project_id):
            self.delete(
                self.l7rule_path.format(l7rule_id=api_l7rule.get('id')),
                status=403)
        self.conf.config(group='api_settings', auth_strategy=auth_strategy)
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            l7policy_id=self.l7policy_id, l7rule_id=api_l7rule.get('id'),
            lb_prov_status=constants.ACTIVE,
            listener_prov_status=constants.ACTIVE,
            l7policy_prov_status=constants.ACTIVE,
            l7rule_prov_status=constants.ACTIVE)
def test_bad_delete(self):
self.delete(self.l7rule_path.format(
l7rule_id=uuidutils.generate_uuid()), status=404)
    @mock.patch('octavia.api.drivers.utils.call_provider')
    def test_delete_with_bad_provider(self, mock_provider):
        """A provider-driver failure during delete surfaces as a 500."""
        api_l7rule = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        # Set status to ACTIVE/ONLINE because set_lb_status did it in the db
        api_l7rule['provisioning_status'] = constants.ACTIVE
        api_l7rule['operating_status'] = constants.ONLINE
        response = self.get(self.l7rule_path.format(
            l7rule_id=api_l7rule.get('id'))).json.get(self.root_tag)
        # The create response had no updated_at yet; the stored row does.
        self.assertIsNone(api_l7rule.pop('updated_at'))
        self.assertIsNotNone(response.pop('updated_at'))
        self.assertEqual(api_l7rule, response)
        mock_provider.side_effect = exceptions.ProviderDriverError(
            prov='bad_driver', user_msg='broken')
        self.delete(self.l7rule_path.format(l7rule_id=api_l7rule.get('id')),
                    status=500)
    def test_create_when_lb_pending_update(self):
        """Creating a rule while the LB is PENDING_UPDATE conflicts (409)."""
        self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        # Put the LB into PENDING_UPDATE with a name change.
        self.put(self.LB_PATH.format(lb_id=self.lb_id),
                 body={'loadbalancer': {'name': 'test_name_change'}})
        new_l7rule = {'type': constants.L7RULE_TYPE_PATH,
                      'compare_type': constants.L7RULE_COMPARE_TYPE_EQUAL_TO,
                      'value': '/api'}
        self.post(self.l7rules_path, body=self._build_body(new_l7rule),
                  status=409)
    def test_update_when_lb_pending_update(self):
        """Updating a rule while the LB is PENDING_UPDATE conflicts (409)."""
        l7rule = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        # Put the LB into PENDING_UPDATE with a name change.
        self.put(self.LB_PATH.format(lb_id=self.lb_id),
                 body={'loadbalancer': {'name': 'test_name_change'}})
        new_l7rule = {'type': constants.L7RULE_TYPE_HOST_NAME,
                      'compare_type': constants.L7RULE_COMPARE_TYPE_REGEX,
                      'value': '.*.example.com'}
        self.put(self.l7rule_path.format(l7rule_id=l7rule.get('id')),
                 body=self._build_body(new_l7rule), status=409)
    def test_delete_when_lb_pending_update(self):
        """Deleting a rule while the LB is PENDING_UPDATE conflicts (409)."""
        l7rule = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        # Put the LB into PENDING_UPDATE with a name change.
        self.put(self.LB_PATH.format(lb_id=self.lb_id),
                 body={'loadbalancer': {'name': 'test_name_change'}})
        self.delete(self.l7rule_path.format(l7rule_id=l7rule.get('id')),
                    status=409)
    def test_create_when_lb_pending_delete(self):
        """Creating a rule while the LB is PENDING_DELETE conflicts (409)."""
        self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        # Cascade-delete puts the whole LB tree into PENDING_DELETE.
        self.delete(self.LB_PATH.format(lb_id=self.lb_id),
                    params={'cascade': "true"})
        new_l7rule = {'type': constants.L7RULE_TYPE_HEADER,
                      'compare_type':
                          constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
                      'value': 'some-string',
                      'key': 'Some-header'}
        self.post(self.l7rules_path, body=self._build_body(new_l7rule),
                  status=409)
    def test_update_when_lb_pending_delete(self):
        """Updating a rule while the LB is PENDING_DELETE conflicts (409)."""
        l7rule = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        # Cascade-delete puts the whole LB tree into PENDING_DELETE.
        self.delete(self.LB_PATH.format(lb_id=self.lb_id),
                    params={'cascade': "true"})
        new_l7rule = {'type': constants.L7RULE_TYPE_COOKIE,
                      'compare_type':
                          constants.L7RULE_COMPARE_TYPE_ENDS_WITH,
                      'value': 'some-string',
                      'key': 'some-cookie'}
        self.put(self.l7rule_path.format(l7rule_id=l7rule.get('id')),
                 body=self._build_body(new_l7rule), status=409)
    def test_delete_when_lb_pending_delete(self):
        """Deleting a rule while the LB is PENDING_DELETE conflicts (409)."""
        l7rule = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api').get(self.root_tag)
        self.set_lb_status(self.lb_id)
        # Cascade-delete puts the whole LB tree into PENDING_DELETE.
        self.delete(self.LB_PATH.format(lb_id=self.lb_id),
                    params={'cascade': "true"})
        self.delete(self.l7rule_path.format(l7rule_id=l7rule.get('id')),
                    status=409)
    def test_update_already_deleted(self):
        """Updating a rule whose LB is DELETED returns 404."""
        l7rule = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api').get(self.root_tag)
        # This updates the child objects
        self.set_lb_status(self.lb_id, status=constants.DELETED)
        new_l7rule = {'type': constants.L7RULE_TYPE_COOKIE,
                      'compare_type':
                          constants.L7RULE_COMPARE_TYPE_ENDS_WITH,
                      'value': 'some-string',
                      'key': 'some-cookie'}
        self.put(self.l7rule_path.format(l7rule_id=l7rule.get('id')),
                 body=self._build_body(new_l7rule), status=404)
    def test_delete_already_deleted(self):
        """Deleting a rule whose LB is DELETED returns 404."""
        l7rule = self.create_l7rule(
            self.l7policy_id, constants.L7RULE_TYPE_PATH,
            constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
            '/api').get(self.root_tag)
        # This updates the child objects
        self.set_lb_status(self.lb_id, status=constants.DELETED)
        self.delete(self.l7rule_path.format(l7rule_id=l7rule.get('id')),
                    status=404)
| 46.79648
| 79
| 0.624168
| 7,404
| 61,163
| 4.826175
| 0.046191
| 0.074301
| 0.046624
| 0.065486
| 0.905969
| 0.886715
| 0.87471
| 0.849775
| 0.827051
| 0.811519
| 0
| 0.021709
| 0.267204
| 61,163
| 1,306
| 80
| 46.832312
| 0.775547
| 0.020241
| 0
| 0.735897
| 0
| 0
| 0.098639
| 0.013056
| 0
| 0
| 0
| 0
| 0.11453
| 1
| 0.052137
| false
| 0
| 0.008547
| 0
| 0.064103
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
65ea21b14fdb8170dc1cc422ec75920418aa27f4
| 102
|
py
|
Python
|
vendor-local/lib/python/celery/execute/__init__.py
|
Mozilla-GitHub-Standards/54c69db06ef83bda60e995a6c34ecfd168ca028994e40ce817295415bb409f0c
|
f80e7c0cff97a1e9b301aa04015db983c7645778
|
[
"BSD-3-Clause"
] | 4
|
2015-05-08T16:58:53.000Z
|
2019-09-06T05:30:59.000Z
|
vendor-local/lib/python/celery/execute/__init__.py
|
Mozilla-GitHub-Standards/54c69db06ef83bda60e995a6c34ecfd168ca028994e40ce817295415bb409f0c
|
f80e7c0cff97a1e9b301aa04015db983c7645778
|
[
"BSD-3-Clause"
] | 2
|
2019-02-17T17:44:53.000Z
|
2019-03-28T03:54:39.000Z
|
vendor-local/lib/python/celery/execute/__init__.py
|
Mozilla-GitHub-Standards/54c69db06ef83bda60e995a6c34ecfd168ca028994e40ce817295415bb409f0c
|
f80e7c0cff97a1e9b301aa04015db983c7645778
|
[
"BSD-3-Clause"
] | 7
|
2015-05-21T15:38:29.000Z
|
2019-10-28T23:39:06.000Z
|
from __future__ import absolute_import

from .. import current_app

# Module-level alias: re-export the current app's send_task so callers can
# keep using ``celery.execute.send_task`` (legacy import location).
send_task = current_app.send_task
| 17
| 38
| 0.833333
| 15
| 102
| 5.066667
| 0.533333
| 0.263158
| 0.368421
| 0.473684
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127451
| 102
| 5
| 39
| 20.4
| 0.853933
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
02d4a9295118d18edcfab748a7829590637003b1
| 128
|
py
|
Python
|
app/features.py
|
Y0plait/TPS
|
3961b86cf2ed5635034d1b8386494f4b7d04ec5f
|
[
"MIT"
] | null | null | null |
app/features.py
|
Y0plait/TPS
|
3961b86cf2ed5635034d1b8386494f4b7d04ec5f
|
[
"MIT"
] | null | null | null |
app/features.py
|
Y0plait/TPS
|
3961b86cf2ed5635034d1b8386494f4b7d04ec5f
|
[
"MIT"
] | null | null | null |
from app import app, render_template
@app.route('/features.html')
def features():
    """Serve the features page by rendering its template."""
    return render_template('features.html')
| 18.285714
| 43
| 0.742188
| 17
| 128
| 5.470588
| 0.588235
| 0.301075
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 128
| 6
| 44
| 21.333333
| 0.830357
| 0
| 0
| 0
| 0
| 0
| 0.210938
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.25
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
b8aa3a99ae118f6ee50b7c576ab812a0a678140c
| 4,503
|
py
|
Python
|
Chapter 06/ch06_r08.py
|
PacktPublishing/Modern-Python-Cookbook
|
40ff656c64421ddf6c288e058bb99a929f6d4c39
|
[
"MIT"
] | 107
|
2016-11-30T02:13:39.000Z
|
2022-03-31T06:38:59.000Z
|
Chapter 06/ch06_r08.py
|
fengfanzhiwu/Modern-Python-Cookbook
|
8eff962f6eae514d531beca9051aefa34941ef4d
|
[
"MIT"
] | 2
|
2017-04-19T04:08:21.000Z
|
2019-03-09T16:18:33.000Z
|
Chapter 06/ch06_r08.py
|
fengfanzhiwu/Modern-Python-Cookbook
|
8eff962f6eae514d531beca9051aefa34941ef4d
|
[
"MIT"
] | 71
|
2017-01-16T06:35:17.000Z
|
2022-03-01T21:25:54.000Z
|
"""Python Cookbook
Chapter 6, recipe 8
"""
from collections import deque
class Leg:
"""
>>> leg_1 = Leg()
>>> leg_1.rate = 6.0 # knots
>>> leg_1.distance = 35.6 # nautical miles
>>> ("option 1 {leg.distance:.1f}nm"
... " at {leg.rate:.2f}kt"
... " = {leg.time:.2f}hr".format(leg=leg_1))
'option 1 35.6nm at 6.00kt = 5.93hr'
leg_1.distance = 38.2
>>> ("option 2 {leg.distance:.1f}nm"
... " at {leg.rate:.2f}kt"
... " = {leg.time:.2f}hr".format(leg=leg_1))
'option 2 35.6nm at 6.00kt = 5.93hr'
leg_1.time= 7
>>> ("option 3 {leg.distance:.1f}nm"
... " at {leg.rate:.2f}kt"
... " = {leg.time:.2f}hr".format(leg=leg_1))
'option 3 35.6nm at 6.00kt = 5.93hr'
"""
def __init__(self, rate=None, time=None, distance=None):
self._changes= deque(maxlen=2)
self._rate= rate
if rate: self._calculate('rate')
self._time= time
if time: self._calculate('time')
self._distance= distance
if distance: self._calculate('distance')
def _calculate(self, change):
if change not in self._changes:
self._changes.append(change)
compute = {'rate', 'time', 'distance'} - set(self._changes)
if compute == {'distance'}:
self._distance = self._time * self._rate
elif compute == {'time'}:
self._time = self._distance / self._rate
elif compute == {'rate'}:
self._rate = self._distance / self._time
@property
def rate(self):
return self._rate
@rate.setter
def rate(self, value):
self._rate = value
self._calculate('rate')
@property
def time(self):
return self._time
@time.setter
def time(self, value):
self._time = value
self._calculate('time')
@property
def distance(self):
return self._distance
@distance.setter
def distance(self, value):
self._distance = value
self._calculate('distance')
class Leg_Alt:
"""
>>> leg_2 = Leg_Alt(rate=6, distance=35.6)
>>> round(leg_2.rate,1)
6
>>> round(leg_2.time,1)
5.9
>>> round(leg_2.distance,1)
35.6
>>> ("option 1 {leg.distance:.1f}nm"
... " at {leg.rate:.2f}kt"
... " = {leg.time:.2f}hr".format(leg=leg_2))
'option 1 35.6nm at 6.00kt = 5.93hr'
"""
def __init__(self, rate=None, time=None, distance=None):
self._changes= deque(maxlen=2)
self._rate= rate
if rate: self._calculate('rate')
self._time= time
if time: self._calculate('time')
self._distance= distance
if distance: self._calculate('distance')
def calc_distance(self):
self._distance = self._time * self._rate
def calc_time(self):
self._time = self._distance / self._rate
def calc_rate(self):
self._rate = self._distance / self._time
def _calculate(self, change):
if change not in self._changes:
self._changes.append(change)
compute = {'rate', 'time', 'distance'} - set(self._changes)
if len(compute) == 1:
name = compute.pop()
method = getattr(self, 'calc_'+name)
method()
@property
def rate(self):
return self._rate
@rate.setter
def rate(self, value):
self._rate = value
self._calculate('rate')
@property
def time(self):
return self._time
@time.setter
def time(self, value):
self._time = value
self._calculate('time')
@property
def distance(self):
return self._distance
@distance.setter
def distance(self, value):
self._distance = value
self._calculate('distance')
if __name__ == "__main__":
    # Run the embedded doctests first, then print a short demonstration
    # of both implementations.
    import doctest
    doctest.testmod()

    leg_1 = Leg()
    leg_1.rate = 6.0 # knots
    leg_1.distance = 35.6 # nautical miles
    # print( vars(leg_1) )
    print("option 1 {leg.distance:.1f}nm at {leg.rate:.2f}kt = {leg.time:.2f}hr".
          format(leg=leg_1))
    leg_1.distance = 38.2
    print("option 2 {leg.distance:.1f}nm at {leg.rate:.2f}kt = {leg.time:.2f}hr".
          format(leg=leg_1))
    leg_1.time= 7
    print("option 3 {leg.distance:.1f}nm at {leg.rate:.2f}kt = {leg.time:.2f}hr".
          format(leg=leg_1))

    leg_2 = Leg_Alt(rate=6, distance=35.6)
    print(leg_2.rate, leg_2.time, leg_2.distance)
    print("option 1 {leg.distance:.1f}nm at {leg.rate:.2f}kt = {leg.time:.2f}hr".
          format(leg=leg_2))
| 28.14375
| 81
| 0.574506
| 618
| 4,503
| 4.011327
| 0.113269
| 0.02743
| 0.025413
| 0.048407
| 0.847519
| 0.838645
| 0.770875
| 0.770875
| 0.770875
| 0.73094
| 0
| 0.042262
| 0.269598
| 4,503
| 159
| 82
| 28.320755
| 0.711462
| 0.204752
| 0
| 0.747573
| 0
| 0.038835
| 0.115541
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.184466
| false
| 0
| 0.019417
| 0.058252
| 0.281553
| 0.048544
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b219438c9cc38bb45b9080327491f7ffaad699c7
| 3,980
|
py
|
Python
|
experiments/ghost_minitaur/process_result.py
|
michalnand/reinforcement_learning_im
|
e29caa2a0b7bca3f9ff45ed949a3d3df3a40c4c1
|
[
"MIT"
] | null | null | null |
experiments/ghost_minitaur/process_result.py
|
michalnand/reinforcement_learning_im
|
e29caa2a0b7bca3f9ff45ed949a3d3df3a40c4c1
|
[
"MIT"
] | null | null | null |
experiments/ghost_minitaur/process_result.py
|
michalnand/reinforcement_learning_im
|
e29caa2a0b7bca3f9ff45ed949a3d3df3a40c4c1
|
[
"MIT"
] | null | null | null |
import sys
sys.path.insert(0, '../../')
from libs_common.RLStatsCompute import *
import matplotlib.pyplot as plt
result_path = "./results/"

# One result log per baseline training run (was six copy-pasted appends).
files = [
    "./models/ddpg_baseline/run_{}/result/result.log".format(run)
    for run in range(6)
]
rl_stats_compute_ddpg = RLStatsCompute(files, result_path + "ddpg_baseline.log")

'''
files = []
files.append("./models/ddpg_imagination/run_0/result/result.log")
files.append("./models/ddpg_imagination/run_1/result/result.log")
files.append("./models/ddpg_imagination/run_2/result/result.log")
files.append("./models/ddpg_imagination/run_3/result/result.log")
rl_stats_compute_imagination = RLStatsCompute(files, result_path + "ddpg_imagination.log")
'''

def _plot_score(x_values, x_label, file_name):
    """Plot the baseline mean score with its confidence band and save a PNG.

    x_values:  x-axis series (games_mean or iterations) from RLStatsCompute.
    x_label:   caption for the x axis.
    file_name: output file name, written under result_path at 300 dpi.
    """
    plt.cla()
    plt.ylabel("score")
    plt.xlabel(x_label)
    plt.grid(color='black', linestyle='-', linewidth=0.1)
    plt.plot(x_values, rl_stats_compute_ddpg.episode_mean, label="ddpg baseline", color='blue')
    plt.fill_between(x_values, rl_stats_compute_ddpg.episode_lower, rl_stats_compute_ddpg.episode_upper, color='blue', alpha=0.2)
    # The imagination curves were disabled upstream; re-add them here
    # (plot + fill_between in red) when those runs are re-enabled.
    plt.legend(loc='lower right', borderaxespad=0.)
    plt.savefig(result_path + file_name, dpi=300)

# The two score plots only differed in x data, label, and file name.
_plot_score(rl_stats_compute_ddpg.games_mean, "episode", "score_per_episode.png")
_plot_score(rl_stats_compute_ddpg.iterations, "iteration", "score_per_iteration.png")

'''
plt.cla()
plt.ylabel("value")
plt.xlabel("iteration")
plt.grid(color='black', linestyle='-', linewidth=0.1)
plt.plot(rl_stats_compute_imagination.iterations, rl_stats_compute_imagination.entropy_mean, label="entropy", color='orange')
plt.fill_between(rl_stats_compute_imagination.iterations, rl_stats_compute_imagination.entropy_lower, rl_stats_compute_imagination.entropy_upper, color='orange', alpha=0.2)
plt.plot(rl_stats_compute_imagination.iterations, rl_stats_compute_imagination.curiosity_mean, label="curiosity", color='green')
plt.fill_between(rl_stats_compute_imagination.iterations, rl_stats_compute_imagination.curiosity_lower, rl_stats_compute_imagination.curiosity_upper, color='green', alpha=0.2)
plt.legend(loc='lower right', borderaxespad=0.)
plt.savefig(result_path + "internal_motivation.png", dpi = 300)
plt.cla()
plt.ylabel("value")
plt.xlabel("iteration")
plt.grid(color='black', linestyle='-', linewidth=0.1)
plt.plot(rl_stats_compute_imagination.iterations, rl_stats_compute_imagination.forward_loss_mean, label="forward model loss", color='navy')
plt.fill_between(rl_stats_compute_imagination.iterations, rl_stats_compute_imagination.forward_loss_lower, rl_stats_compute_imagination.forward_loss_upper, color='navy', alpha=0.2)
plt.legend(loc='lower right', borderaxespad=0.)
plt.savefig(result_path + "forward_model_loss.png", dpi = 300)
'''
| 43.26087
| 180
| 0.805779
| 583
| 3,980
| 5.186964
| 0.12693
| 0.085648
| 0.171296
| 0.214947
| 0.886574
| 0.833995
| 0.782738
| 0.763558
| 0.763558
| 0.632275
| 0
| 0.012997
| 0.052764
| 3,980
| 92
| 181
| 43.26087
| 0.789125
| 0.154774
| 0
| 0.275862
| 0
| 0
| 0.283136
| 0.199129
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.103448
| 0
| 0.103448
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b23230c6f112564663d234afc832860a9aeb1514
| 15,580
|
py
|
Python
|
tests/gateways/disk/test_read.py
|
sakibguy/conda
|
892f9f6a196312765b515b0db53a5558566fe456
|
[
"BSD-3-Clause"
] | 4,825
|
2015-01-01T15:43:08.000Z
|
2022-03-31T18:24:48.000Z
|
tests/gateways/disk/test_read.py
|
sakibguy/conda
|
892f9f6a196312765b515b0db53a5558566fe456
|
[
"BSD-3-Clause"
] | 11,043
|
2015-01-01T01:38:46.000Z
|
2022-03-31T20:28:46.000Z
|
tests/gateways/disk/test_read.py
|
sakibguy/conda
|
892f9f6a196312765b515b0db53a5558566fe456
|
[
"BSD-3-Clause"
] | 1,401
|
2015-01-01T15:43:18.000Z
|
2022-03-30T18:33:40.000Z
|
# -*- coding: utf-8 -*-
# Copyright (C) 2012 Anaconda, Inc
# SPDX-License-Identifier: BSD-3-Clause
from __future__ import absolute_import, division, print_function, unicode_literals
from os.path import isdir, join, dirname
from pprint import pprint
from conda.common.compat import on_win
from conda.common.path import get_python_site_packages_short_path
from conda.common.serialize import json_dump, json_load
from conda.gateways.disk.read import read_python_record
import pytest
from tests.data.env_metadata import (
METADATA_VERSION_PATHS, PATH_TEST_ENV_1, PATH_TEST_ENV_2, PATH_TEST_ENV_3, PATH_TEST_ENV_4,
__file__ as env_metadata_file,
)
ENV_METADATA_DIR = dirname(env_metadata_file)
def test_scrapy_py36_osx_whl():
    """Scrapy installed from a wheel (dist-info/RECORD) in a py36 env.

    read_python_record must synthesize a pypi-channel record whose static
    metadata, file list, and path data match the checked-in fixture tree.
    """
    anchor_file = "lib/python3.6/site-packages/Scrapy-1.5.1.dist-info/RECORD"
    prefix_path = join(ENV_METADATA_DIR, "py36-osx-whl")
    if not isdir(prefix_path):
        # The fixture tree is optional in some checkouts; skip, don't fail.
        pytest.skip("test files not found: %s" % prefix_path)
    prefix_rec = read_python_record(prefix_path, anchor_file, "3.6")
    # Round-trip through JSON so comparisons use plain dicts/lists/strings.
    dumped_rec = json_load(json_dump(prefix_rec.dump()))
    files = dumped_rec.pop("files")
    paths_data = dumped_rec.pop("paths_data")
    print(json_dump(dumped_rec))
    assert dumped_rec == {
        "build": "pypi_0",
        "build_number": 0,
        "channel": "https://conda.anaconda.org/pypi",
        "constrains": [],
        "depends": [
            "cssselect >=0.9",
            "lxml",
            "parsel >=1.1",
            "pydispatcher >=2.0.5",
            "pyopenssl",
            "python 3.6.*",
            "queuelib",
            "service-identity",
            "six >=1.5.2",
            "twisted >=13.1.0",
            "w3lib >=1.17.0"
        ],
        "fn": "Scrapy-1.5.1.dist-info",
        "name": "scrapy",
        "package_type": "virtual_python_wheel",
        "subdir": "pypi",
        "version": "1.5.1"
    }
    print(json_dump(files))
    print(json_dump(paths_data["paths"]))
    sp_dir = get_python_site_packages_short_path("3.6")
    assert sp_dir + "/scrapy/core/scraper.py" in files
    assert sp_dir + "/scrapy/core/__pycache__/scraper.cpython-36.pyc" in files
    # Source file: hash and size come straight from the RECORD file.
    pd1 = {
        "_path": sp_dir + "/scrapy/core/scraper.py",
        "path_type": "hardlink",
        "sha256": "2559X9n2z1YKdFV9ElMRD6_88LIdqH1a2UwQimStt2k",
        "size_in_bytes": 9960
    }
    assert pd1 in paths_data["paths"]
    # Bytecode file: present in paths but with no recorded hash/size.
    pd2 = {
        "_path": sp_dir + "/scrapy/core/__pycache__/scraper.cpython-36.pyc",
        "path_type": "hardlink",
        "sha256": None,
        "size_in_bytes": None
    }
    assert pd2 in paths_data["paths"]
    # Entry-point script lives outside site-packages (bin/ or ../bin on win).
    pd3 = {
        "_path": "../bin/scrapy" if on_win else "bin/scrapy",
        "path_type": "hardlink",
        "sha256": "RncAAoxSEnSi_0VIopaRxsq6kryQGL61YbEweN2TW3g",
        "size_in_bytes": 268
    }
    assert pd3 in paths_data["paths"]
def test_twilio_py36_osx_whl():
    """Twilio installed from a wheel (dist-info/RECORD) in a py36 env."""
    anchor_file = "lib/python3.6/site-packages/twilio-6.16.1.dist-info/RECORD"
    prefix_path = join(ENV_METADATA_DIR, "py36-osx-whl")
    if not isdir(prefix_path):
        pytest.skip("test files not found: %s" % prefix_path)
    prefix_rec = read_python_record(prefix_path, anchor_file, "3.6")
    pprint(prefix_rec.depends)
    pprint(prefix_rec.constrains)
    # Round-trip through JSON so comparisons use plain dicts/lists/strings.
    dumped_rec = json_load(json_dump(prefix_rec.dump()))
    files = dumped_rec.pop("files")
    paths_data = dumped_rec.pop("paths_data")
    print(json_dump(dumped_rec))
    assert dumped_rec == {
        "build": "pypi_0",
        "build_number": 0,
        "channel": "https://conda.anaconda.org/pypi",
        "constrains": [],
        "depends": [
            "pyjwt >=1.4.2",
            "pysocks",
            "python 3.6.*",
            "pytz",
            "requests >=2.0.0",
            "six"
        ],
        "fn": "twilio-6.16.1.dist-info",
        "name": "twilio",
        "package_type": "virtual_python_wheel",
        "subdir": "pypi",
        "version": "6.16.1"
    }
    print(json_dump(files))
    print(json_dump(paths_data["paths"]))
    sp_dir = get_python_site_packages_short_path("3.6")
    assert sp_dir + "/twilio/compat.py" in files
    assert sp_dir + "/twilio/__pycache__/compat.cpython-36.pyc" in files
    pd1 = {
        "_path": sp_dir + "/twilio/compat.py",
        "path_type": "hardlink",
        "sha256": "sJ1t7CKvxpipiX5cyH1YwXTf3n_FsLf_taUhuCVsCwE",
        "size_in_bytes": 517
    }
    assert pd1 in paths_data["paths"]
    # NOTE(review): pd2 checks the jwt/ copy of compat while the membership
    # assert above uses the top-level one — confirm this is intentional.
    pd2 = {
        "_path": sp_dir + "/twilio/jwt/__pycache__/compat.cpython-36.pyc",
        "path_type": "hardlink",
        "sha256": None,
        "size_in_bytes": None
    }
    assert pd2 in paths_data["paths"]
def test_pyjwt_py36_osx_whl():
    """PyJWT wheel metadata in a py36 env; its constrains list is non-empty."""
    anchor_file = "lib/python3.6/site-packages/PyJWT-1.6.4.dist-info/RECORD"
    prefix_path = join(ENV_METADATA_DIR, "py36-osx-whl")
    if not isdir(prefix_path):
        pytest.skip("test files not found: %s" % prefix_path)
    prefix_rec = read_python_record(prefix_path, anchor_file, "3.6")
    # Round-trip through JSON so comparisons use plain dicts/lists/strings.
    dumped_rec = json_load(json_dump(prefix_rec.dump()))
    files = dumped_rec.pop("files")
    paths_data = dumped_rec.pop("paths_data")
    print(json_dump(dumped_rec))
    assert dumped_rec == {
        "build": "pypi_0",
        "build_number": 0,
        "channel": "https://conda.anaconda.org/pypi",
        "constrains": [
            "cryptography >=1.4",
            "pytest <4,>3"
        ],
        "depends": [
            "python 3.6.*"
        ],
        "fn": "PyJWT-1.6.4.dist-info",
        "name": "pyjwt",
        "package_type": "virtual_python_wheel",
        "subdir": "pypi",
        "version": "1.6.4"
    }
    print(json_dump(files))
    print(json_dump(paths_data["paths"]))
    sp_dir = get_python_site_packages_short_path("3.6")
    assert ("../bin/pyjwt" if on_win else "bin/pyjwt") in files
    assert sp_dir + '/jwt/__pycache__/__init__.cpython-36.pyc' in files
    pd1 = {
        "_path": "../bin/pyjwt" if on_win else "bin/pyjwt",
        "path_type": "hardlink",
        "sha256": "wZET_24uZDEpsMdhAQ78Ass2k-76aQ59yPSE4DTE2To",
        "size_in_bytes": 260
    }
    assert pd1 in paths_data["paths"]
    pd2 = {
        "_path": sp_dir + "/jwt/contrib/__pycache__/__init__.cpython-36.pyc",
        "path_type": "hardlink",
        "sha256": None,
        "size_in_bytes": None
    }
    assert pd2 in paths_data["paths"]
def test_cherrypy_py36_osx_whl():
    """CherryPy wheel metadata in a py36 env; depends vary by platform."""
    anchor_file = "lib/python3.6/site-packages/CherryPy-17.2.0.dist-info/RECORD"
    prefix_path = join(ENV_METADATA_DIR, "py36-osx-whl")
    if not isdir(prefix_path):
        pytest.skip("test files not found: %s" % prefix_path)
    prefix_rec = read_python_record(prefix_path, anchor_file, "3.6")
    dumped_rec = json_load(json_dump(prefix_rec.dump()))
    # files/paths_data are popped (and otherwise unused here) so the
    # exact-match comparison below covers only the static metadata.
    files = dumped_rec.pop("files")
    paths_data = dumped_rec.pop("paths_data")
    print(json_dump(dumped_rec))
    # constrains/depends are asserted separately: depends is platform-dependent.
    constrains = dumped_rec.pop("constrains")
    depends = dumped_rec.pop("depends")
    assert dumped_rec == {
        "build": "pypi_0",
        "build_number": 0,
        "channel": "https://conda.anaconda.org/pypi",
        "fn": "CherryPy-17.2.0.dist-info",
        "name": "cherrypy",
        "package_type": "virtual_python_wheel",
        "subdir": "pypi",
        "version": "17.2.0"
    }
    assert constrains == [
        "jaraco-packaging >=3.2",
        # "pypiwin32 ==219",
        "pytest >=2.8",
        "python-memcached >=1.58",
        "routes >=2.3.1",
        "rst-linker >=1.9"
    ]
    if on_win:
        # Windows additionally pulls in pywin32.
        assert depends == [
            "cheroot >=6.2.4",
            "more-itertools",
            "portend >=2.1.1",
            "python 3.6.*",
            "pywin32",
            "six >=1.11.0"
        ]
    else:
        assert depends == [
            "cheroot >=6.2.4",
            "more-itertools",
            "portend >=2.1.1",
            "python 3.6.*",
            "six >=1.11.0"
        ]
def test_scrapy_py27_osx_no_binary():
    """Scrapy installed from sdist (egg-info/PKG-INFO) in a py27 env.

    Egg installs are reported as virtual_python_egg_manageable and their
    path entries carry no hash/size information.
    """
    anchor_file = "lib/python2.7/site-packages/Scrapy-1.5.1-py2.7.egg-info/PKG-INFO"
    prefix_path = join(ENV_METADATA_DIR, "py27-osx-no-binary")
    if not isdir(prefix_path):
        pytest.skip("test files not found: %s" % prefix_path)
    prefix_rec = read_python_record(prefix_path, anchor_file, "2.7")
    # Round-trip through JSON so comparisons use plain dicts/lists/strings.
    dumped_rec = json_load(json_dump(prefix_rec.dump()))
    files = dumped_rec.pop("files")
    paths_data = dumped_rec.pop("paths_data")
    print(json_dump(dumped_rec))
    assert dumped_rec == {
        "build": "pypi_0",
        "build_number": 0,
        "channel": "https://conda.anaconda.org/pypi",
        "constrains": [],
        "depends": [
            "cssselect >=0.9",
            "lxml",
            "parsel >=1.1",
            "pydispatcher >=2.0.5",
            "pyopenssl",
            "python 2.7.*",
            "queuelib",
            "service-identity",
            "six >=1.5.2",
            "twisted >=13.1.0",
            "w3lib >=1.17.0"
        ],
        "fn": "Scrapy-1.5.1-py2.7.egg-info",
        "name": "scrapy",
        "package_type": "virtual_python_egg_manageable",
        "subdir": "pypi",
        "version": "1.5.1"
    }
    print(json_dump(files))
    print(json_dump(paths_data["paths"]))
    sp_dir = get_python_site_packages_short_path("2.7")
    assert sp_dir + "/scrapy/contrib/downloadermiddleware/decompression.py" in files
    # NOTE(review): this membership check uses downloadermiddlewares/ while
    # pd2 below uses contrib/downloadermiddleware/ — confirm both paths are
    # genuinely present in the fixture.
    assert sp_dir + "/scrapy/downloadermiddlewares/decompression.pyc" in files
    assert ("../bin/scrapy" if on_win else "bin/scrapy") in files
    pd1 = {
        "_path": sp_dir + "/scrapy/contrib/downloadermiddleware/decompression.py",
        "path_type": "hardlink"
    }
    assert pd1 in paths_data["paths"]
    pd2 = {
        "_path": sp_dir + "/scrapy/contrib/downloadermiddleware/decompression.pyc",
        "path_type": "hardlink"
    }
    assert pd2 in paths_data["paths"]
    pd3 = {
        "_path": "../bin/scrapy" if on_win else "bin/scrapy",
        "path_type": "hardlink"
    }
    assert pd3 in paths_data["paths"]
def test_twilio_py27_osx_no_binary():
    """Twilio installed from sdist (egg-info/PKG-INFO) in a py27 env."""
    anchor_file = "lib/python2.7/site-packages/twilio-6.16.1-py2.7.egg-info/PKG-INFO"
    prefix_path = join(ENV_METADATA_DIR, "py27-osx-no-binary")
    if not isdir(prefix_path):
        pytest.skip("test files not found: %s" % prefix_path)
    prefix_rec = read_python_record(prefix_path, anchor_file, "2.7")
    pprint(prefix_rec.depends)
    pprint(prefix_rec.constrains)
    # Round-trip through JSON so comparisons use plain dicts/lists/strings.
    dumped_rec = json_load(json_dump(prefix_rec.dump()))
    files = dumped_rec.pop("files")
    paths_data = dumped_rec.pop("paths_data")
    print(json_dump(dumped_rec))
    assert dumped_rec == {
        "build": "pypi_0",
        "build_number": 0,
        "channel": "https://conda.anaconda.org/pypi",
        "constrains": [],
        "depends": [
            "pyjwt >=1.4.2",
            "python 2.7.*",
            "pytz",
            "requests >=2.0.0",
            "six"
        ],
        "fn": "twilio-6.16.1-py2.7.egg-info",
        "name": "twilio",
        "package_type": "virtual_python_egg_manageable",
        "subdir": "pypi",
        "version": "6.16.1"
    }
    print(json_dump(files))
    print(json_dump(paths_data["paths"]))
    sp_dir = get_python_site_packages_short_path("2.7")
    assert sp_dir + "/twilio/compat.py" in files
    assert sp_dir + "/twilio/compat.pyc" in files
    pd1 = {
        "_path": sp_dir + "/twilio/compat.py",
        "path_type": "hardlink"
    }
    assert pd1 in paths_data["paths"]
    # NOTE(review): pd2 checks the jwt/ copy of compat.pyc while the
    # membership assert above uses the top-level one — confirm intentional.
    pd2 = {
        "_path": sp_dir + "/twilio/jwt/compat.pyc",
        "path_type": "hardlink"
    }
    assert pd2 in paths_data["paths"]
def test_pyjwt_py27_osx_no_binary():
    """PyJWT installed from sdist (egg-info/PKG-INFO) in a py27 env."""
    anchor_file = "lib/python2.7/site-packages/PyJWT-1.6.4-py2.7.egg-info/PKG-INFO"
    prefix_path = join(ENV_METADATA_DIR, "py27-osx-no-binary")
    if not isdir(prefix_path):
        pytest.skip("test files not found: %s" % prefix_path)
    prefix_rec = read_python_record(prefix_path, anchor_file, "2.7")
    # Round-trip through JSON so comparisons use plain dicts/lists/strings.
    dumped_rec = json_load(json_dump(prefix_rec.dump()))
    files = dumped_rec.pop("files")
    paths_data = dumped_rec.pop("paths_data")
    print(json_dump(dumped_rec))
    assert dumped_rec == {
        "build": "pypi_0",
        "build_number": 0,
        "channel": "https://conda.anaconda.org/pypi",
        "constrains": [
            "cryptography >=1.4",
            "pytest <4,>3"
        ],
        "depends": [
            "python 2.7.*"
        ],
        "fn": "PyJWT-1.6.4-py2.7.egg-info",
        "name": "pyjwt",
        "package_type": "virtual_python_egg_manageable",
        "subdir": "pypi",
        "version": "1.6.4"
    }
    print(json_dump(files))
    print(json_dump(paths_data["paths"]))
    sp_dir = get_python_site_packages_short_path("2.7")
    assert ('../bin/pyjwt' if on_win else 'bin/pyjwt') in files
    assert sp_dir + '/jwt/__init__.pyc' in files
    pd1 = {
        "_path": "../bin/pyjwt" if on_win else "bin/pyjwt",
        "path_type": "hardlink"
    }
    assert pd1 in paths_data["paths"]
    pd2 = {
        "_path": sp_dir + "/jwt/contrib/__init__.pyc",
        "path_type": "hardlink"
    }
    assert pd2 in paths_data["paths"]
def test_cherrypy_py27_osx_no_binary():
    """CherryPy sdist metadata in a py27 env; depends vary by platform."""
    anchor_file = "lib/python2.7/site-packages/CherryPy-17.2.0-py2.7.egg-info/PKG-INFO"
    prefix_path = join(ENV_METADATA_DIR, "py27-osx-no-binary")
    if not isdir(prefix_path):
        pytest.skip("test files not found: %s" % prefix_path)
    prefix_rec = read_python_record(prefix_path, anchor_file, "2.7")
    dumped_rec = json_load(json_dump(prefix_rec.dump()))
    # files/paths_data are popped (and otherwise unused here) so the
    # exact-match comparison below covers only the static metadata.
    files = dumped_rec.pop("files")
    paths_data = dumped_rec.pop("paths_data")
    print(json_dump(dumped_rec))
    # constrains/depends are asserted separately: depends is platform-dependent.
    constrains = dumped_rec.pop("constrains")
    depends = dumped_rec.pop("depends")
    assert dumped_rec == {
        "build": "pypi_0",
        "build_number": 0,
        "channel": "https://conda.anaconda.org/pypi",
        "fn": "CherryPy-17.2.0-py2.7.egg-info",
        "name": "cherrypy",
        "package_type": "virtual_python_egg_manageable",
        "subdir": "pypi",
        "version": "17.2.0"
    }
    assert constrains == [
        "jaraco-packaging >=3.2",
        "pytest >=2.8",
        "python-memcached >=1.58",
        "routes >=2.3.1",
        "rst-linker >=1.9"
    ]
    if on_win:
        # Windows additionally pulls in pywin32.
        assert depends == [
            "cheroot >=6.2.4",
            "more-itertools",
            "portend >=2.1.1",
            "python 2.7.*",
            "pywin32",
            "six >=1.11.0"
        ]
    else:
        assert depends == [
            "cheroot >=6.2.4",
            "more-itertools",
            "portend >=2.1.1",
            "python 2.7.*",
            "six >=1.11.0"
        ]
def test_six_py27_osx_no_binary_unmanageable():
    """An unmanageable egg install: record has metadata but no files/paths."""
    anchor_file = "lib/python2.7/site-packages/six-1.11.0-py2.7.egg-info/PKG-INFO"
    prefix_path = join(ENV_METADATA_DIR, "py27-osx-no-binary")
    if not isdir(prefix_path):
        pytest.skip("test files not found: %s" % prefix_path)
    prefix_rec = read_python_record(prefix_path, anchor_file, "2.7")
    # Round-trip through JSON so comparisons use plain dicts/lists/strings.
    dumped_rec = json_load(json_dump(prefix_rec.dump()))
    files = dumped_rec.pop("files")
    paths_data = dumped_rec.pop("paths_data")
    print(json_dump(dumped_rec))
    assert dumped_rec == {
        "build": "pypi_0",
        "build_number": 0,
        "channel": "https://conda.anaconda.org/pypi",
        "constrains": [],
        "depends": [
            "python 2.7.*"
        ],
        "fn": "six-1.11.0-py2.7.egg-info",
        "name": "six",
        "package_type": "virtual_python_egg_unmanageable",
        "subdir": "pypi",
        "version": "1.11.0"
    }
    # Unmanageable installs expose no file inventory at all.
    assert not files
    assert not prefix_rec.paths_data.paths
| 33.505376
| 95
| 0.590629
| 2,034
| 15,580
| 4.265487
| 0.091445
| 0.05083
| 0.030429
| 0.025818
| 0.887736
| 0.873329
| 0.855694
| 0.816736
| 0.793799
| 0.7568
| 0
| 0.042244
| 0.253979
| 15,580
| 464
| 96
| 33.577586
| 0.704207
| 0.007125
| 0
| 0.759907
| 0
| 0.020979
| 0.331738
| 0.107734
| 0
| 0
| 0
| 0
| 0.102564
| 1
| 0.020979
| false
| 0
| 0.020979
| 0
| 0.041958
| 0.062937
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b23956d954136bb6ea385c306488c6bd52ff58f9
| 18,560
|
py
|
Python
|
escola/tests/prova_marcada/test_prova_marcadas_models.py
|
vini84200/medusa2
|
37cf33d05be8b0195b10845061ca893ba5e814dd
|
[
"MIT"
] | 1
|
2019-03-15T18:04:24.000Z
|
2019-03-15T18:04:24.000Z
|
escola/tests/prova_marcada/test_prova_marcadas_models.py
|
vini84200/medusa2
|
37cf33d05be8b0195b10845061ca893ba5e814dd
|
[
"MIT"
] | 22
|
2019-03-17T21:53:50.000Z
|
2021-03-31T19:12:19.000Z
|
escola/tests/prova_marcada/test_prova_marcadas_models.py
|
vini84200/medusa2
|
37cf33d05be8b0195b10845061ca893ba5e814dd
|
[
"MIT"
] | 1
|
2018-11-25T03:05:23.000Z
|
2018-11-25T03:05:23.000Z
|
import datetime
import pytest
from django.contrib.auth.models import User
from mixer.backend.django import mixer
from escola import user_utils
from escola.models import Evento, EventoTurma, ProvaMarcada, ProvaAreaMarcada, ProvaMateriaMarcada, Turma, \
MateriaDaTurma, AreaConhecimento, Conteudo
pytestmark = pytest.mark.django_db
# Turma
def test_turma_get_alunos(faker):
    """get_list_alunos() grows as alunos are registered to the turma."""
    turma: Turma = mixer.blend(Turma)
    assert len(turma.get_list_alunos()) == 0
    # NOTE(review): faker.name is passed uncalled — probably meant
    # faker.name(); confirm against create_aluno_user.
    for expected_count in (1, 2):
        aluno = user_utils.create_aluno_user(faker.user_name(), faker.password(), turma, faker.name)
        assert aluno in turma.get_list_alunos()
        assert len(turma.get_list_alunos()) == expected_count
# Evento
def test_evento_create(faker):
    """Evento.create persists exactly one new row."""
    before = Evento.objects.count()
    Evento.create(faker.sentence(), faker.date_time(), faker.paragraph(), mixer.blend(User))
    assert Evento.objects.count() == before + 1
def test_evento_get_nome(faker):
    """get_nome() returns the nome given to create()."""
    nome, data, descricao = faker.sentence(), faker.date_time(), faker.paragraph()
    evento = Evento.create(nome, data, descricao, mixer.blend(User))
    assert evento.get_nome() == nome
def test_evento_get_data(faker):
    """get_data() returns the data given to create()."""
    nome, data, descricao = faker.sentence(), faker.date_time(), faker.paragraph()
    evento = Evento.create(nome, data, descricao, mixer.blend(User))
    assert evento.get_data() == data
def test_evento_get_descricao(faker):
    """get_descricao() returns the descricao given to create()."""
    nome, data, descricao = faker.sentence(), faker.date_time(), faker.paragraph()
    evento = Evento.create(nome, data, descricao, mixer.blend(User))
    assert evento.get_descricao() == descricao
def test_evento_get_owner(faker):
    """get_owner() returns the owner given to create()."""
    nome, data, descricao = faker.sentence(), faker.date_time(), faker.paragraph()
    owner = mixer.blend(User)
    evento = Evento.create(nome, data, descricao, owner)
    assert evento.get_owner() == owner
def test_evento_somebody_has_permition_to_edit_denies(faker):
    """A user other than the owner may not edit the Evento."""
    nome, data, descricao = faker.sentence(), faker.date_time(), faker.paragraph()
    evento = Evento.create(nome, data, descricao, mixer.blend(User))
    stranger = mixer.blend(User)
    assert evento.has_permition_edit(stranger) is False
def test_evento_owner_has_permition_to_edit_accept(faker):
    """The owner may edit their own Evento."""
    nome, data, descricao = faker.sentence(), faker.date_time(), faker.paragraph()
    owner = mixer.blend(User)
    evento = Evento.create(nome, data, descricao, owner)
    assert evento.has_permition_edit(owner) is True
def test_evento_update(faker):
    """update(nome=...) replaces the stored nome."""
    nome_antigo = faker.sentence()
    nome_novo = faker.sentence()
    data, descricao = faker.date_time(), faker.paragraph()
    evento = Evento.create(nome_antigo, data, descricao, mixer.blend(User))
    # Two random sentences should differ; rerun on the off chance they collide.
    assert not nome_novo == nome_antigo
    assert evento.get_nome() == nome_antigo
    evento.update(nome=nome_novo)
    assert evento.get_nome() == nome_novo
# Test Evento Turma
def test_evento_turma_create(faker):
    """EventoTurma.create persists exactly one new row."""
    before = EventoTurma.objects.count()
    turma = mixer.blend(Turma)
    EventoTurma.create(turma, faker.sentence(), faker.date_time(), faker.paragraph(), mixer.blend(User))
    assert EventoTurma.objects.count() == before + 1
def test_evento_turma_get_nome(faker):
    """get_nome() returns the nome given to create()."""
    turma = mixer.blend(Turma)
    nome, data, descricao = faker.sentence(), faker.date_time(), faker.paragraph()
    evento_turma = EventoTurma.create(turma, nome, data, descricao, mixer.blend(User))
    assert evento_turma.get_nome() == nome
def test_evento_turma_get_data(faker):
    """get_data() returns the data given to create()."""
    turma = mixer.blend(Turma)
    nome, data, descricao = faker.sentence(), faker.date_time(), faker.paragraph()
    evento_turma = EventoTurma.create(turma, nome, data, descricao, mixer.blend(User))
    assert evento_turma.get_data() == data
def test_evento_turma_get_descricao(faker):
    """get_descricao() returns the descricao given to create()."""
    turma = mixer.blend(Turma)
    nome, data, descricao = faker.sentence(), faker.date_time(), faker.paragraph()
    evento_turma = EventoTurma.create(turma, nome, data, descricao, mixer.blend(User))
    assert evento_turma.get_descricao() == descricao
def test_evento_turma_get_owner(faker):
    """get_owner() returns the owner given to create()."""
    turma = mixer.blend(Turma)
    nome, data, descricao = faker.sentence(), faker.date_time(), faker.paragraph()
    owner = mixer.blend(User)
    evento_turma = EventoTurma.create(turma, nome, data, descricao, owner)
    assert evento_turma.get_owner() == owner
def test_evento_turma_get_turma(faker):
    """get_turma() returns the turma given to create()."""
    turma = mixer.blend(Turma)
    nome, data, descricao = faker.sentence(), faker.date_time(), faker.paragraph()
    evento_turma = EventoTurma.create(turma, nome, data, descricao, mixer.blend(User))
    assert evento_turma.get_turma() == turma
def test_evento_turma_somebody_has_permition_to_edit_denies(faker):
    """A user other than the owner may not edit the EventoTurma."""
    turma = mixer.blend(Turma)
    nome, data, descricao = faker.sentence(), faker.date_time(), faker.paragraph()
    evento_turma = EventoTurma.create(turma, nome, data, descricao, mixer.blend(User))
    stranger = mixer.blend(User)
    assert evento_turma.has_permition_edit(stranger) is False
def test_evento_turma_owner_has_permition_to_edit_accept(faker):
    """The owner may edit their own EventoTurma."""
    turma = mixer.blend(Turma)
    nome, data, descricao = faker.sentence(), faker.date_time(), faker.paragraph()
    owner = mixer.blend(User)
    evento_turma = EventoTurma.create(turma, nome, data, descricao, owner)
    assert evento_turma.has_permition_edit(owner) is True
def test_evento_turma_update(faker):
    """update(turma=...) switches the event to the new turma."""
    turma_antiga = mixer.blend(Turma)
    turma_nova = mixer.blend(Turma)
    nome, data, descricao = faker.sentence(), faker.date_time(), faker.paragraph()
    evento_turma = EventoTurma.create(turma_antiga, nome, data, descricao, mixer.blend(User))
    # Two separate blends should differ; rerun on the off chance they collide.
    assert not turma_nova == turma_antiga
    assert evento_turma.get_turma() == turma_antiga
    evento_turma.update(turma=turma_nova)
    assert evento_turma.get_turma() == turma_nova
def test_evento_turma_get_participantes(faker):
    """Every aluno of the turma is a participante of its EventoTurma."""
    turma = mixer.blend(Turma)
    nome, data, descricao = faker.sentence(), faker.date_time(), faker.paragraph()
    owner = mixer.blend(User)
    # NOTE(review): faker.name is passed uncalled — probably meant
    # faker.name(); confirm against create_aluno_user.
    alunos = [
        user_utils.create_aluno_user(faker.user_name(), faker.password(), turma, faker.name)
        for _ in range(2)
    ]
    evento_turma = EventoTurma.create(turma, nome, data, descricao, owner)
    for aluno in alunos:
        assert aluno in evento_turma.get_participantes()
    assert len(evento_turma.get_participantes()) == 2
# Prova Marcada
def test_prova_marcada_create(faker):
    """ProvaMarcada.create persists exactly one new row."""
    before = ProvaMarcada.objects.count()
    turma = mixer.blend(Turma)
    ProvaMarcada.create(turma, faker.sentence(), faker.date_time(), faker.paragraph(), mixer.blend(User))
    assert ProvaMarcada.objects.count() == before + 1
def test_prova_marcada_get_nome(faker):
    """get_nome() returns the nome given to create()."""
    turma = mixer.blend(Turma)
    nome, data, descricao = faker.sentence(), faker.date_time(), faker.paragraph()
    prova = ProvaMarcada.create(turma, nome, data, descricao, mixer.blend(User))
    assert prova.get_nome() == nome
def test_prova_marcada_get_data(faker):
    """get_data() returns the data given to create()."""
    turma = mixer.blend(Turma)
    nome, data, descricao = faker.sentence(), faker.date_time(), faker.paragraph()
    prova = ProvaMarcada.create(turma, nome, data, descricao, mixer.blend(User))
    assert prova.get_data() == data
def test_prova_marcada_get_descricao(faker):
    """get_descricao() returns the descricao given to create()."""
    turma = mixer.blend(Turma)
    nome, data, descricao = faker.sentence(), faker.date_time(), faker.paragraph()
    prova = ProvaMarcada.create(turma, nome, data, descricao, mixer.blend(User))
    assert prova.get_descricao() == descricao
def test_prova_marcada_get_owner(faker):
    """get_owner() returns the owner given to create()."""
    turma = mixer.blend(Turma)
    nome, data, descricao = faker.sentence(), faker.date_time(), faker.paragraph()
    owner = mixer.blend(User)
    prova = ProvaMarcada.create(turma, nome, data, descricao, owner)
    assert prova.get_owner() == owner
def test_prova_marcada_get_turma(faker):
    """get_turma() returns the turma given to create()."""
    turma = mixer.blend(Turma)
    nome, data, descricao = faker.sentence(), faker.date_time(), faker.paragraph()
    prova = ProvaMarcada.create(turma, nome, data, descricao, mixer.blend(User))
    assert prova.get_turma() == turma
def test_prova_marcada_somebody_has_permition_to_edit_denies(faker):
    """A user other than the owner may not edit the ProvaMarcada."""
    turma = mixer.blend(Turma)
    nome, data, descricao = faker.sentence(), faker.date_time(), faker.paragraph()
    prova = ProvaMarcada.create(turma, nome, data, descricao, mixer.blend(User))
    stranger = mixer.blend(User)
    assert prova.has_permition_edit(stranger) is False
def test_prova_marcada_owner_has_permition_to_edit_accept(faker):
    """The owner may edit their own ProvaMarcada."""
    turma = mixer.blend(Turma)
    nome, data, descricao = faker.sentence(), faker.date_time(), faker.paragraph()
    owner = mixer.blend(User)
    prova = ProvaMarcada.create(turma, nome, data, descricao, owner)
    assert prova.has_permition_edit(owner) is True
def test_prova_marcada_get_participantes(faker):
    """Every aluno of the turma is a participante of its ProvaMarcada."""
    turma = mixer.blend(Turma)
    nome, data, descricao = faker.sentence(), faker.date_time(), faker.paragraph()
    owner = mixer.blend(User)
    # NOTE(review): faker.name is passed uncalled — probably meant
    # faker.name(); confirm against create_aluno_user.
    alunos = [
        user_utils.create_aluno_user(faker.user_name(), faker.password(), turma, faker.name)
        for _ in range(2)
    ]
    prova = ProvaMarcada.create(turma, nome, data, descricao, owner)
    for aluno in alunos:
        assert aluno in prova.get_participantes()
    assert len(prova.get_participantes()) == 2
def test_prova_marcada_get_materias(faker):
    """A materia-level prova delegates get_materias to its underlying prova."""
    materia = mixer.blend(MateriaDaTurma)
    nome, data, descricao = faker.sentence(), faker.date_time(), faker.paragraph()
    prova_materia = ProvaMateriaMarcada.create(materia, nome, data, descricao, mixer.blend(User))
    assert prova_materia._prova.get_materias() == prova_materia.get_materias()
    assert prova_materia._prova.get_materias() == [materia, ]
# def test_prova_marcada_get_conteudos(faker):
# turma = mixer.blend(Turma)
# nome = faker.sentence()
# data = faker.date_time()
# descricao = faker.paragraph()
# owner = mixer.blend(User)
# a = ProvaMarcada.create(turma, nome, data, descricao, owner)
# assert None is a.get_conteudos()
# c = mixer.blend(Conteudo)
# a.conteudos.add(c)
# assert a.get_conteudos() == [c, ]
#
#
# def test_prova_marcada_add_conteudo(faker):
# turma = mixer.blend(Turma)
# nome = faker.sentence()
# data = faker.date_time()
# descricao = faker.paragraph()
# owner = mixer.blend(User)
# a = ProvaMarcada.create(turma, nome, data, descricao, owner)
# assert a.get_conteudos() == []
# c = mixer.blend(Conteudo)
# a.add_conteudo(c)
# assert a.get_conteudos() == [c, ]
#
#
# def test_prova_marcada_add_conteudos(faker):
# turma = mixer.blend(Turma)
# nome = faker.sentence()
# data = faker.date_time()
# descricao = faker.paragraph()
# owner = mixer.blend(User)
# a = ProvaMarcada.create(turma, nome, data, descricao, owner)
# assert a.get_conteudos() == []
# c0 = mixer.blend(Conteudo)
# c1 = mixer.blend(Conteudo)
# a.add_conteudos([c0, c1, ])
# assert a.get_conteudos() == [c0, c1, ]
# Prova Marcada Materia
def test_prova_marcada_materia_create(faker):
    """ProvaMateriaMarcada.create persists one row and registers its materia.

    Fixed: ``initial`` was computed but never used; the row-count assertion
    present in every sibling ``*_create`` test was missing.
    """
    initial = ProvaMateriaMarcada.objects.count()
    materia = mixer.blend(MateriaDaTurma)
    nome = faker.sentence()
    data = faker.date_time()
    descricao = faker.paragraph()
    owner = mixer.blend(User)
    a = ProvaMateriaMarcada.create(materia, nome, data, descricao, owner)
    # Consistency with test_evento_create / test_prova_marcada_create.
    assert initial + 1 == ProvaMateriaMarcada.objects.count()
    assert a.get_materias() == [materia, ]
def test_prova_marcada_materia_get_nome(faker):
    """get_nome() returns the nome given to create()."""
    materia = mixer.blend(MateriaDaTurma)
    nome, data, descricao = faker.sentence(), faker.date_time(), faker.paragraph()
    prova_materia = ProvaMateriaMarcada.create(materia, nome, data, descricao, mixer.blend(User))
    assert prova_materia.get_nome() == nome
def test_prova_marcada_materia_get_data(faker):
    """get_data() echoes the data supplied to create()."""
    alvo = mixer.blend(MateriaDaTurma)
    titulo = faker.sentence()
    momento = faker.date_time()
    texto = faker.paragraph()
    criador = mixer.blend(User)
    prova = ProvaMateriaMarcada.create(alvo, titulo, momento, texto, criador)
    assert prova.get_data() == momento
def test_prova_marcada_materia_get_descricao(faker):
    """get_descricao() echoes the descricao supplied to create()."""
    alvo = mixer.blend(MateriaDaTurma)
    titulo = faker.sentence()
    momento = faker.date_time()
    texto = faker.paragraph()
    criador = mixer.blend(User)
    prova = ProvaMateriaMarcada.create(alvo, titulo, momento, texto, criador)
    assert prova.get_descricao() == texto
def test_prova_marcada_materia_get_owner(faker):
    """get_owner() echoes the owner supplied to create()."""
    alvo = mixer.blend(MateriaDaTurma)
    titulo = faker.sentence()
    momento = faker.date_time()
    texto = faker.paragraph()
    criador = mixer.blend(User)
    prova = ProvaMateriaMarcada.create(alvo, titulo, momento, texto, criador)
    assert prova.get_owner() == criador
def test_prova_marcada_materia_get_turma(faker):
    """get_turma() resolves through the materia to its turma."""
    turma_pai = mixer.blend(Turma)
    alvo = mixer.blend(MateriaDaTurma, turma=turma_pai)
    titulo = faker.sentence()
    momento = faker.date_time()
    texto = faker.paragraph()
    criador = mixer.blend(User)
    prova = ProvaMateriaMarcada.create(alvo, titulo, momento, texto, criador)
    assert prova.get_turma() == turma_pai
def test_prova_marcada_materia_somebody_has_permition_to_edit_denies(faker):
    """An unrelated user must not be allowed to edit the prova."""
    alvo = mixer.blend(MateriaDaTurma)
    titulo = faker.sentence()
    momento = faker.date_time()
    texto = faker.paragraph()
    criador = mixer.blend(User)
    prova = ProvaMateriaMarcada.create(alvo, titulo, momento, texto, criador)
    estranho = mixer.blend(User)
    assert prova.has_permition_edit(estranho) is False
def test_prova_marcada_materia_owner_has_permition_to_edit_accept(faker):
    """The owner must be allowed to edit the prova."""
    alvo = mixer.blend(MateriaDaTurma)
    titulo = faker.sentence()
    momento = faker.date_time()
    texto = faker.paragraph()
    criador = mixer.blend(User)
    prova = ProvaMateriaMarcada.create(alvo, titulo, momento, texto, criador)
    assert prova.has_permition_edit(criador) is True
def test_prova_marcada_materia_get_participantes(faker):
    """get_participantes() lists exactly the alunos of the materia's turma."""
    turma_pai = mixer.blend(Turma)
    alvo = mixer.blend(MateriaDaTurma, turma=turma_pai)
    titulo = faker.sentence()
    momento = faker.date_time()
    texto = faker.paragraph()
    criador = mixer.blend(User)
    primeiro = user_utils.create_aluno_user(
        faker.user_name(), faker.password(), turma_pai, faker.name())
    segundo = user_utils.create_aluno_user(
        faker.user_name(), faker.password(), turma_pai, faker.name())
    prova = ProvaMateriaMarcada.create(alvo, titulo, momento, texto, criador)
    assert primeiro in prova.get_participantes()
    assert segundo in prova.get_participantes()
    assert len(prova.get_participantes()) == 2
# TODO delete
# Prova Marcada Area
def test_prova_marcada_area_create(faker):
    """create() must persist exactly one new ProvaAreaMarcada."""
    antes = ProvaAreaMarcada.objects.count()
    alvo = mixer.blend(AreaConhecimento)
    titulo = faker.sentence()
    momento = faker.date_time()
    texto = faker.paragraph()
    criador = mixer.blend(User)
    prova = ProvaAreaMarcada.create(alvo, titulo, momento, texto, criador)
    assert antes + 1 == ProvaAreaMarcada.objects.count()
def test_prova_marcada_area_get_nome(faker):
    """get_nome() echoes the nome supplied to create()."""
    alvo = mixer.blend(AreaConhecimento)
    titulo = faker.sentence()
    momento = faker.date_time()
    texto = faker.paragraph()
    criador = mixer.blend(User)
    prova = ProvaAreaMarcada.create(alvo, titulo, momento, texto, criador)
    assert prova.get_nome() == titulo
def test_prova_marcada_area_get_data(faker):
    """get_data() echoes the data supplied to create()."""
    alvo = mixer.blend(AreaConhecimento)
    titulo = faker.sentence()
    momento = faker.date_time()
    texto = faker.paragraph()
    criador = mixer.blend(User)
    prova = ProvaAreaMarcada.create(alvo, titulo, momento, texto, criador)
    assert prova.get_data() == momento
def test_prova_marcada_area_get_descricao(faker):
    """get_descricao() echoes the descricao supplied to create()."""
    alvo = mixer.blend(AreaConhecimento)
    titulo = faker.sentence()
    momento = faker.date_time()
    texto = faker.paragraph()
    criador = mixer.blend(User)
    prova = ProvaAreaMarcada.create(alvo, titulo, momento, texto, criador)
    assert prova.get_descricao() == texto
def test_prova_marcada_area_get_owner(faker):
    """get_owner() echoes the owner supplied to create()."""
    alvo = mixer.blend(AreaConhecimento)
    titulo = faker.sentence()
    momento = faker.date_time()
    texto = faker.paragraph()
    criador = mixer.blend(User)
    prova = ProvaAreaMarcada.create(alvo, titulo, momento, texto, criador)
    assert prova.get_owner() == criador
def test_prova_marcada_area_get_turma(faker):
    """get_turma() resolves through the area to its turma."""
    turma_pai = mixer.blend(Turma)
    alvo = mixer.blend(AreaConhecimento, turma=turma_pai)
    titulo = faker.sentence()
    momento = faker.date_time()
    texto = faker.paragraph()
    criador = mixer.blend(User)
    prova = ProvaAreaMarcada.create(alvo, titulo, momento, texto, criador)
    assert prova.get_turma() == turma_pai
def test_prova_marcada_area_somebody_has_permition_to_edit_denies(faker):
    """An unrelated user must not be allowed to edit the prova."""
    alvo = mixer.blend(AreaConhecimento)
    titulo = faker.sentence()
    momento = faker.date_time()
    texto = faker.paragraph()
    criador = mixer.blend(User)
    prova = ProvaAreaMarcada.create(alvo, titulo, momento, texto, criador)
    estranho = mixer.blend(User)
    assert prova.has_permition_edit(estranho) is False
def test_prova_marcada_area_owner_has_permition_to_edit_accept(faker):
    """The owner must be allowed to edit the prova."""
    alvo = mixer.blend(AreaConhecimento)
    titulo = faker.sentence()
    momento = faker.date_time()
    texto = faker.paragraph()
    criador = mixer.blend(User)
    prova = ProvaAreaMarcada.create(alvo, titulo, momento, texto, criador)
    assert prova.has_permition_edit(criador) is True
def test_prova_marcada_area_get_participantes(faker):
    """get_participantes() must list every aluno of the area's turma."""
    turma = mixer.blend(Turma)
    area = mixer.blend(AreaConhecimento, turma=turma)
    nome = faker.sentence()
    data = faker.date_time()
    descricao = faker.paragraph()
    owner = mixer.blend(User)
    # BUG FIX: faker.name was passed as a bound method instead of being
    # called, so each aluno got a method object, not a generated name
    # (the materia twin of this test calls faker.name() correctly).
    aluno0 = user_utils.create_aluno_user(faker.user_name(), faker.password(), turma, faker.name())
    aluno1 = user_utils.create_aluno_user(faker.user_name(), faker.password(), turma, faker.name())
    a = ProvaAreaMarcada.create(area, nome, data, descricao, owner)
    assert aluno0 in a.get_participantes()
    assert aluno1 in a.get_participantes()
    assert len(a.get_participantes()) == 2
# TODO delete
| 32.278261
| 108
| 0.701778
| 2,372
| 18,560
| 5.3086
| 0.038364
| 0.083386
| 0.060038
| 0.08561
| 0.916852
| 0.886118
| 0.869282
| 0.844028
| 0.839819
| 0.832116
| 0
| 0.002436
| 0.181681
| 18,560
| 574
| 109
| 32.334495
| 0.826639
| 0.075054
| 0
| 0.786058
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.001742
| 0.153846
| 1
| 0.112981
| false
| 0.024038
| 0.014423
| 0
| 0.127404
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b24385c22ebff2ca43970b363c7842f9b3e34f3f
| 899
|
py
|
Python
|
logictables.py
|
finnks13/functional-completeness-checker
|
d04ca9bbcf56383621f14e6d5ea68587ef4a2c26
|
[
"MIT"
] | null | null | null |
logictables.py
|
finnks13/functional-completeness-checker
|
d04ca9bbcf56383621f14e6d5ea68587ef4a2c26
|
[
"MIT"
] | null | null | null |
logictables.py
|
finnks13/functional-completeness-checker
|
d04ca9bbcf56383621f14e6d5ea68587ef4a2c26
|
[
"MIT"
] | null | null | null |
def return1():
    """Return all 1-bit truth-table rows in ascending order."""
    return [[bit] for bit in (0, 1)]
def return2():
    """Return all 2-bit truth-table rows in ascending binary order."""
    return [[hi, lo] for hi in (0, 1) for lo in (0, 1)]
def return3():
    """Return all 3-bit truth-table rows in ascending binary order."""
    return [[b2, b1, b0]
            for b2 in (0, 1)
            for b1 in (0, 1)
            for b0 in (0, 1)]
def return4():
    """Return all 16 four-bit truth-table rows in ascending binary order.

    Generated with itertools.product instead of a hand-typed table (the
    original comment "i hope i got these right" flagged the risk of a typo);
    product((0, 1), repeat=4) yields exactly the same lexicographic order.
    """
    from itertools import product
    return [list(row) for row in product((0, 1), repeat=4)]
def return5():
    """Return all 32 five-bit truth-table rows in ascending binary order.

    Generated with itertools.product instead of a hand-typed table (the
    original comment "i really hope that i got these right" flagged the
    typo risk); product((0, 1), repeat=5) matches the same ordering.
    """
    from itertools import product
    return [list(row) for row in product((0, 1), repeat=5)]
| 44.95
| 109
| 0.429366
| 287
| 899
| 1.344948
| 0.055749
| 0.336788
| 0.256477
| 0.176166
| 0.782383
| 0.782383
| 0.766839
| 0.738342
| 0.637306
| 0.251295
| 0
| 0.332911
| 0.121246
| 899
| 20
| 110
| 44.95
| 0.155696
| 0.067853
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.357143
| true
| 0
| 0
| 0.357143
| 0.714286
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 13
|
b27ab0ebbe5f4d64ad79c2934edb0351a9318c27
| 241
|
py
|
Python
|
timemachines/inclusion/statsmodelsinclusion.py
|
iklasky/timemachines
|
1820fa9453d31d4daaeff75274a935c7455febe3
|
[
"MIT"
] | 253
|
2021-01-08T17:33:30.000Z
|
2022-03-21T17:32:36.000Z
|
timemachines/inclusion/statsmodelsinclusion.py
|
iklasky/timemachines
|
1820fa9453d31d4daaeff75274a935c7455febe3
|
[
"MIT"
] | 65
|
2021-01-20T16:43:35.000Z
|
2022-03-30T19:07:22.000Z
|
timemachines/inclusion/statsmodelsinclusion.py
|
iklasky/timemachines
|
1820fa9453d31d4daaeff75274a935c7455febe3
|
[
"MIT"
] | 28
|
2021-02-04T14:58:30.000Z
|
2022-01-17T04:35:17.000Z
|
# Detect which ARIMA implementation (if any) this statsmodels install offers:
# newer releases expose statsmodels.tsa.arima.model.ARIMA, older ones used
# statsmodels.tsa.arima_model.ARIMA. `using_statsmodels` records the result.
try:
    from statsmodels.tsa.arima.model import ARIMA
    using_statsmodels = True
except ImportError:
    try:
        from statsmodels.tsa.arima_model import ARIMA
        using_statsmodels = True
    except ImportError:
        # BUG FIX: was a bare `except:`, which would also swallow
        # KeyboardInterrupt/SystemExit; only import failure is expected here.
        using_statsmodels = False
| 24.1
| 52
| 0.713693
| 28
| 241
| 6
| 0.428571
| 0.285714
| 0.214286
| 0.25
| 0.809524
| 0.809524
| 0.809524
| 0.809524
| 0.809524
| 0.809524
| 0
| 0
| 0.236515
| 241
| 9
| 53
| 26.777778
| 0.913043
| 0
| 0
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 12
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.