hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7d89d7bc0209c44c0dee2a545f4f655a803b15ba
| 293
|
py
|
Python
|
guillotina/json/__init__.py
|
rboixaderg/guillotina
|
fcae65c2185222272f3b8fee4bc2754e81e0e983
|
[
"BSD-2-Clause"
] | 173
|
2017-03-10T18:26:12.000Z
|
2022-03-03T06:48:56.000Z
|
guillotina/json/__init__.py
|
rboixaderg/guillotina
|
fcae65c2185222272f3b8fee4bc2754e81e0e983
|
[
"BSD-2-Clause"
] | 921
|
2017-03-08T14:04:43.000Z
|
2022-03-30T10:28:56.000Z
|
guillotina/json/__init__.py
|
rboixaderg/guillotina
|
fcae65c2185222272f3b8fee4bc2754e81e0e983
|
[
"BSD-2-Clause"
] | 60
|
2017-03-16T19:59:44.000Z
|
2022-03-03T06:48:59.000Z
|
# make sure configure registrations are executed
from . import deserialize_content # noqa
from . import deserialize_value # noqa
from . import serialize_content # noqa
from . import serialize_schema # noqa
from . import serialize_schema_field # noqa
from . import serialize_value # noqa
| 36.625
| 48
| 0.788396
| 37
| 293
| 6.054054
| 0.405405
| 0.267857
| 0.3125
| 0.410714
| 0.258929
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.167235
| 293
| 7
| 49
| 41.857143
| 0.918033
| 0.259386
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7dc7463c522eda207ed92b12fa65e45d0b704ffa
| 187
|
py
|
Python
|
lektor_groupby/__init__.py
|
relikd/lektor-groupby-plugin
|
eb0a60ab33648b6b8ee633a5af86ee61d9398abd
|
[
"MIT"
] | null | null | null |
lektor_groupby/__init__.py
|
relikd/lektor-groupby-plugin
|
eb0a60ab33648b6b8ee633a5af86ee61d9398abd
|
[
"MIT"
] | null | null | null |
lektor_groupby/__init__.py
|
relikd/lektor-groupby-plugin
|
eb0a60ab33648b6b8ee633a5af86ee61d9398abd
|
[
"MIT"
] | null | null | null |
from .config import Config # noqa: F401
from .groupby import GroupBy # noqa: F401
from .plugin import GroupByPlugin # noqa: F401
from .watcher import GroupByCallbackArgs # noqa: F401
| 37.4
| 54
| 0.764706
| 24
| 187
| 5.958333
| 0.416667
| 0.223776
| 0.251748
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.077419
| 0.171123
| 187
| 4
| 55
| 46.75
| 0.845161
| 0.229947
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7de3665d2f3ae41d40846e1aaedea87b1646b771
| 7,556
|
py
|
Python
|
parser/team24/graphQueries.py
|
webdev188/tytus
|
847071edb17b218f51bb969d335a8ec093d13f94
|
[
"MIT"
] | 35
|
2020-12-07T03:11:43.000Z
|
2021-04-15T17:38:16.000Z
|
parser/team24/graphQueries.py
|
webdev188/tytus
|
847071edb17b218f51bb969d335a8ec093d13f94
|
[
"MIT"
] | 47
|
2020-12-09T01:29:09.000Z
|
2021-01-13T05:37:50.000Z
|
parser/team24/graphQueries.py
|
webdev188/tytus
|
847071edb17b218f51bb969d335a8ec093d13f94
|
[
"MIT"
] | 556
|
2020-12-07T03:13:31.000Z
|
2021-06-17T17:41:10.000Z
|
import graphviz as gr
import classesQuerys as cq
def graphTree(queries):
g = gr.Digraph()
g.node('queries','Lista de Queries')
for inst in queries:
g.node(str(hash(inst)),'Query')
g.edge('queries',str(hash(inst)))
graphSelect(inst,g)
g.render('tree.svg',view=True)
def graphSelect(query,graph):
hashstr = str(hash(query))
#Graficando el distinct
graph.node(hashstr+'distinct','Distinct')
graph.node(hashstr+'distinctTF',str(query.distinct))
graph.edge(hashstr,hashstr+'distinct')
graph.edge(hashstr+'distinct',hashstr+'distinctTF')
#Graficando el Select_List
graph.node(hashstr+'select_list','Select List')
graph.edge(hashstr,hashstr+'select_list')
for col in query.select_list:
colhash = str(hash(col))
#Si la columna es de tipo ID
if isinstance(col,cq.exp_id):
graph.node(colhash,'ID column')
graph.node(colhash+'val','Val : '+col.val)
graph.node(colhash+'table','Table : '+str(col.table))
graph.edge(hashstr+'select_list',colhash)
graph.edge(colhash,colhash+'val')
graph.edge(colhash,colhash+'table')
graph.node(colhash+'alias','Alias: '+str(col.alias))
graph.edge(colhash,colhash+'alias')
#Si la columna es una funcion matemática
# o trigonométrica
if isinstance(col,cq.column_mathtrig):
graph.node(colhash,'Function column: '+str(col.__class__.__name__))
graph.edge(hashstr+'select_list',colhash)
##
#Escribimos dependiendo de los atributos
if hasattr(col,'exp'):
graph.node(colhash+'exp','Exp : '+str(col.exp.val))
graph.edge(colhash,colhash+'exp')
if hasattr(col,'exp1'):
graph.node(colhash+'exp1','Exp1 : '+str(col.exp1.val))
graph.edge(colhash,colhash+'exp1')
if hasattr(col,'exp2'):
graph.node(colhash+'exp2','Exp2 : '+str(col.exp2.val))
graph.edge(colhash,colhash+'exp2')
if hasattr(col,'exp3'):
graph.node(colhash+'exp3','Exp3 : '+str(col.exp3.val))
graph.edge(colhash,colhash+'exp3')
if hasattr(col,'exp4'):
graph.node(colhash+'exp4','Exp4 : '+str(col.exp4.val))
graph.edge(colhash,colhash+'exp4')
##
graph.node(colhash+'alias','Alias: '+str(col.alias))
graph.edge(colhash,colhash+'alias')
#Si la columna es una funcion
# de utilidad
if isinstance(col,cq.column_function):
graph.node(colhash,'Utility column: '+str(col.__class__.__name__))
graph.edge(hashstr+'select_list',colhash)
##
#Escribimos dependiendo de los atributos
if hasattr(col,'exp'):
graph.node(colhash+'exp','Exp : '+str(col.exp.val))
graph.edge(colhash,colhash+'exp')
if hasattr(col,'min'):
graph.node(colhash+'min','From : '+str(col.min))
graph.edge(colhash,colhash+'min')
if hasattr(col,'max'):
graph.node(colhash+'max','To : '+str(col.max))
graph.edge(colhash,colhash+'max')
if hasattr(col,'type'):
graph.node(colhash+'type','Type : '+str(col.type))
graph.edge(colhash,colhash+'type')
if hasattr(col,'lexps'):
graph.node(colhash+'lexps','Lista de expresiones')
graph.edge(colhash,colhash+'lexps')
lexps = col.lexps
graph_exps(lexps,graph,'lexps',colhash)
if hasattr(col,'union'):
graph.node(colhash+'union','Condición booleana: '+str(col.union))
graph.edge(colhash,colhash+'union')
##
graph.node(colhash+'alias','Alias: '+str(col.alias))
graph.edge(colhash,colhash+'alias')
def graph_exps(list,graph,text,hashstr):
for col in list:
colhash = str(hash(col))
#Si la columna es de tipo ID
if isinstance(col,cq.exp_id):
graph.node(colhash,'ID column')
graph.node(colhash+'val','Val : '+col.val)
graph.node(colhash+'table','Table : '+str(col.table))
graph.edge(hashstr+text,colhash)
graph.edge(colhash,colhash+'val')
graph.edge(colhash,colhash+'table')
graph.node(colhash+'alias','Alias: '+str(col.alias))
graph.edge(colhash,colhash+'alias')
#Si la columna es una funcion matemática
# o trigonométrica
if isinstance(col,cq.column_mathtrig):
graph.node(colhash,'Function column: '+str(col.__class__.__name__))
graph.edge(hashstr+text,colhash)
##
#Escribimos dependiendo de los atributos
if hasattr(col,'exp'):
graph.node(colhash+'exp','Exp : '+str(col.exp.val))
graph.edge(colhash,colhash+'exp')
if hasattr(col,'exp1'):
graph.node(colhash+'exp1','Exp1 : '+str(col.exp1.val))
graph.edge(colhash,colhash+'exp1')
if hasattr(col,'exp2'):
graph.node(colhash+'exp2','Exp2 : '+str(col.exp2.val))
graph.edge(colhash,colhash+'exp2')
if hasattr(col,'exp3'):
graph.node(colhash+'exp3','Exp3 : '+str(col.exp3.val))
graph.edge(colhash,colhash+'exp3')
if hasattr(col,'exp4'):
graph.node(colhash+'exp4','Exp4 : '+str(col.exp4.val))
graph.edge(colhash,colhash+'exp4')
##
graph.node(colhash+'alias','Alias: '+str(col.alias))
graph.edge(colhash,colhash+'alias')
#Si la columna es una funcion
# de utilidad
if isinstance(col,cq.column_mathtrig):
graph.node(colhash,'Utility column: '+str(col.__class__.__name__))
graph.edge(hashstr+text,colhash)
##
#Escribimos dependiendo de los atributos
if hasattr(col,'exp'):
graph.node(colhash+'exp','Exp : '+str(col.exp.val))
graph.edge(colhash,colhash+'exp')
if hasattr(col,'min'):
graph.node(colhash+'min','From : '+str(col.min))
graph.edge(colhash,colhash+'min')
if hasattr(col,'max'):
graph.node(colhash+'max','To : '+str(col.max))
graph.edge(colhash,colhash+'max')
if hasattr(col,'type'):
graph.node(colhash+'type','Type : '+str(col.type))
graph.edge(colhash,colhash+'type')
if hasattr(col,'lexps'):
graph.node(colhash+'lexps','Lista de exp')
graph.edge(colhash,colhash+'lexps')
lexps = col.lexps
graph_exps(lexps,graph,'lexps',hashstr)
if hasattr(col,'union'):
graph.node(colhash+'union','Condición booleana: '+str(col.union))
graph.edge(colhash,colhash+'union')
##
if isinstance(col,cq.exp_num):
#Si es una expresión numérica
graph.node(colhash,'Number: '+str(col.val))
graph.edge(hashstr+text,colhash)
if isinstance(col,cq.exp_text):
#Si es una expresión numérica
graph.node(colhash,'Number: '+str(col.val))
graph.edge(hashstr+text,colhash)
| 39.354167
| 81
| 0.549894
| 869
| 7,556
| 4.72382
| 0.098964
| 0.092083
| 0.15201
| 0.173691
| 0.854811
| 0.844093
| 0.841657
| 0.841657
| 0.841657
| 0.839708
| 0
| 0.007571
| 0.300821
| 7,556
| 191
| 82
| 39.560209
| 0.769449
| 0.067099
| 0
| 0.792593
| 0
| 0
| 0.122327
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022222
| false
| 0
| 0.014815
| 0
| 0.037037
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
81c043de92d58c854c95e00e3c4818e66d96ef08
| 5,086
|
py
|
Python
|
examples/semantic_segmentation/modelNet/aspp.py
|
TrainingDML/pytdml
|
b1c21533a44d931717d9398cbdc57b1ee4ef3302
|
[
"MIT"
] | 2
|
2022-03-25T13:12:05.000Z
|
2022-03-29T07:09:03.000Z
|
examples/semantic_segmentation/modelNet/aspp.py
|
TrainingDML/pytdml
|
b1c21533a44d931717d9398cbdc57b1ee4ef3302
|
[
"MIT"
] | null | null | null |
examples/semantic_segmentation/modelNet/aspp.py
|
TrainingDML/pytdml
|
b1c21533a44d931717d9398cbdc57b1ee4ef3302
|
[
"MIT"
] | 1
|
2022-03-31T06:11:17.000Z
|
2022-03-31T06:11:17.000Z
|
import torch
import torch.nn as nn
import torch.nn.functional as F
class ASPP(nn.Module):
def __init__(self, num_classes):
super(ASPP, self).__init__()
self.conv_1x1_1 = nn.Conv2d(512, 256, kernel_size=1)
self.bn_conv_1x1_1 = nn.BatchNorm2d(256)
self.conv_3x3_1 = nn.Conv2d(512, 256, kernel_size=3, stride=1, padding=6, dilation=6)
self.bn_conv_3x3_1 = nn.BatchNorm2d(256)
self.conv_3x3_2 = nn.Conv2d(512, 256, kernel_size=3, stride=1, padding=12, dilation=12)
self.bn_conv_3x3_2 = nn.BatchNorm2d(256)
self.conv_3x3_3 = nn.Conv2d(512, 256, kernel_size=3, stride=1, padding=18, dilation=18)
self.bn_conv_3x3_3 = nn.BatchNorm2d(256)
self.avg_pool = nn.AdaptiveAvgPool2d(1)
self.conv_1x1_2 = nn.Conv2d(512, 256, kernel_size=1)
self.bn_conv_1x1_2 = nn.BatchNorm2d(256)
self.conv_1x1_3 = nn.Conv2d(1280, 256, kernel_size=1) # (1280 = 5*256)
self.bn_conv_1x1_3 = nn.BatchNorm2d(256)
self.conv_1x1_4 = nn.Conv2d(256, num_classes, kernel_size=1)
def forward(self, feature_map):
# (feature_map has shape (batch_size, 512, h/16, w/16)) (assuming self.resnet is ResNet18_OS16 or ResNet34_OS16. If self.resnet instead is ResNet18_OS8 or ResNet34_OS8, it will be (batch_size, 512, h/8, w/8))
feature_map_h = feature_map.size()[2] # (== h/16)
feature_map_w = feature_map.size()[3] # (== w/16)
out_1x1 = F.relu(self.bn_conv_1x1_1(self.conv_1x1_1(feature_map))) # (shape: (batch_size, 256, h/16, w/16))
out_3x3_1 = F.relu(self.bn_conv_3x3_1(self.conv_3x3_1(feature_map))) # (shape: (batch_size, 256, h/16, w/16))
out_3x3_2 = F.relu(self.bn_conv_3x3_2(self.conv_3x3_2(feature_map))) # (shape: (batch_size, 256, h/16, w/16))
out_3x3_3 = F.relu(self.bn_conv_3x3_3(self.conv_3x3_3(feature_map))) # (shape: (batch_size, 256, h/16, w/16))
out_img = self.avg_pool(feature_map) # (shape: (batch_size, 512, 1, 1))
out_img = F.relu(self.bn_conv_1x1_2(self.conv_1x1_2(out_img))) # (shape: (batch_size, 256, 1, 1))
out_img = F.upsample(out_img, size=(feature_map_h, feature_map_w),
mode="bilinear") # (shape: (batch_size, 256, h/16, w/16))
out = torch.cat([out_1x1, out_3x3_1, out_3x3_2, out_3x3_3, out_img],
1) # (shape: (batch_size, 1280, h/16, w/16))
out = F.relu(self.bn_conv_1x1_3(self.conv_1x1_3(out))) # (shape: (batch_size, 256, h/16, w/16))
out = self.conv_1x1_4(out) # (shape: (batch_size, num_classes, h/16, w/16))
return out
class ASPP_Bottleneck(nn.Module):
def __init__(self, num_classes):
super(ASPP_Bottleneck, self).__init__()
self.conv_1x1_1 = nn.Conv2d(4 * 512, 256, kernel_size=1)
self.bn_conv_1x1_1 = nn.BatchNorm2d(256)
self.conv_3x3_1 = nn.Conv2d(4 * 512, 256, kernel_size=3, stride=1, padding=6, dilation=6)
self.bn_conv_3x3_1 = nn.BatchNorm2d(256)
self.conv_3x3_2 = nn.Conv2d(4 * 512, 256, kernel_size=3, stride=1, padding=12, dilation=12)
self.bn_conv_3x3_2 = nn.BatchNorm2d(256)
self.conv_3x3_3 = nn.Conv2d(4 * 512, 256, kernel_size=3, stride=1, padding=18, dilation=18)
self.bn_conv_3x3_3 = nn.BatchNorm2d(256)
self.avg_pool = nn.AdaptiveAvgPool2d(1)
self.conv_1x1_2 = nn.Conv2d(4 * 512, 256, kernel_size=1)
self.bn_conv_1x1_2 = nn.BatchNorm2d(256)
self.conv_1x1_3 = nn.Conv2d(1280, 256, kernel_size=1) # (1280 = 5*256)
self.bn_conv_1x1_3 = nn.BatchNorm2d(256)
self.conv_1x1_4 = nn.Conv2d(256, num_classes, kernel_size=1)
def forward(self, feature_map):
# (feature_map has shape (batch_size, 4*512, h/16, w/16))
feature_map_h = feature_map.size()[2] # (== h/16)
feature_map_w = feature_map.size()[3] # (== w/16)
out_1x1 = F.relu(self.bn_conv_1x1_1(self.conv_1x1_1(feature_map))) # (shape: (batch_size, 256, h/16, w/16))
out_3x3_1 = F.relu(self.bn_conv_3x3_1(self.conv_3x3_1(feature_map))) # (shape: (batch_size, 256, h/16, w/16))
out_3x3_2 = F.relu(self.bn_conv_3x3_2(self.conv_3x3_2(feature_map))) # (shape: (batch_size, 256, h/16, w/16))
out_3x3_3 = F.relu(self.bn_conv_3x3_3(self.conv_3x3_3(feature_map))) # (shape: (batch_size, 256, h/16, w/16))
out_img = self.avg_pool(feature_map) # (shape: (batch_size, 512, 1, 1))
out_img = F.relu(self.bn_conv_1x1_2(self.conv_1x1_2(out_img))) # (shape: (batch_size, 256, 1, 1))
out_img = F.upsample(out_img, size=(feature_map_h, feature_map_w),
mode="bilinear") # (shape: (batch_size, 256, h/16, w/16))
out = torch.cat([out_1x1, out_3x3_1, out_3x3_2, out_3x3_3, out_img],
1) # (shape: (batch_size, 1280, h/16, w/16))
out = F.relu(self.bn_conv_1x1_3(self.conv_1x1_3(out))) # (shape: (batch_size, 256, h/16, w/16))
out = self.conv_1x1_4(out) # (shape: (batch_size, num_classes, h/16, w/16))
return out
| 48.903846
| 216
| 0.639796
| 880
| 5,086
| 3.389773
| 0.081818
| 0.075092
| 0.080456
| 0.036205
| 0.930272
| 0.924237
| 0.924237
| 0.924237
| 0.910828
| 0.88535
| 0
| 0.145604
| 0.212741
| 5,086
| 103
| 217
| 49.378641
| 0.599401
| 0.21746
| 0
| 0.753623
| 0
| 0
| 0.00405
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057971
| false
| 0
| 0.043478
| 0
| 0.15942
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c48e2a941baa809fdb9fba39af06983c2e542897
| 11,320
|
py
|
Python
|
tests/components/nuki/test_config_flow.py
|
pcaston/core
|
e74d946cef7a9d4e232ae9e0ba150d18018cfe33
|
[
"Apache-2.0"
] | 1
|
2021-07-08T20:09:55.000Z
|
2021-07-08T20:09:55.000Z
|
tests/components/nuki/test_config_flow.py
|
pcaston/core
|
e74d946cef7a9d4e232ae9e0ba150d18018cfe33
|
[
"Apache-2.0"
] | 47
|
2021-02-21T23:43:07.000Z
|
2022-03-31T06:07:10.000Z
|
tests/components/nuki/test_config_flow.py
|
OpenPeerPower/core
|
f673dfac9f2d0c48fa30af37b0a99df9dd6640ee
|
[
"Apache-2.0"
] | null | null | null |
"""Test the nuki config flow."""
from unittest.mock import patch
from pynuki.bridge import InvalidCredentialsException
from requests.exceptions import RequestException
from openpeerpower import config_entries, data_entry_flow, setup
from openpeerpower.components.dhcp import HOSTNAME, IP_ADDRESS, MAC_ADDRESS
from openpeerpower.components.nuki.const import DOMAIN
from openpeerpower.const import CONF_TOKEN
from .mock import HOST, MAC, MOCK_INFO, NAME, setup_nuki_integration
async def test_form(opp):
"""Test we get the form."""
await setup.async_setup_component(opp, "persistent_notification", {})
result = await opp.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {}
with patch(
"openpeerpower.components.nuki.config_flow.NukiBridge.info",
return_value=MOCK_INFO,
), patch(
"openpeerpower.components.nuki.async_setup", return_value=True
) as mock_setup, patch(
"openpeerpower.components.nuki.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await opp.config_entries.flow.async_configure(
result["flow_id"],
{
"host": "1.1.1.1",
"port": 8080,
"token": "test-token",
},
)
await opp.async_block_till_done()
assert result2["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result2["title"] == 123456789
assert result2["data"] == {
"host": "1.1.1.1",
"port": 8080,
"token": "test-token",
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_import(opp):
"""Test that the import works."""
await setup.async_setup_component(opp, "persistent_notification", {})
with patch(
"openpeerpower.components.nuki.config_flow.NukiBridge.info",
return_value=MOCK_INFO,
), patch(
"openpeerpower.components.nuki.async_setup", return_value=True
) as mock_setup, patch(
"openpeerpower.components.nuki.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result = await opp.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={"host": "1.1.1.1", "port": 8080, "token": "test-token"},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == 123456789
assert result["data"] == {
"host": "1.1.1.1",
"port": 8080,
"token": "test-token",
}
await opp.async_block_till_done()
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_invalid_auth(opp):
"""Test we handle invalid auth."""
result = await opp.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"openpeerpower.components.nuki.config_flow.NukiBridge.info",
side_effect=InvalidCredentialsException,
):
result2 = await opp.config_entries.flow.async_configure(
result["flow_id"],
{
"host": "1.1.1.1",
"port": 8080,
"token": "test-token",
},
)
assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result2["errors"] == {"base": "invalid_auth"}
async def test_form_cannot_connect(opp):
"""Test we handle cannot connect error."""
result = await opp.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"openpeerpower.components.nuki.config_flow.NukiBridge.info",
side_effect=RequestException,
):
result2 = await opp.config_entries.flow.async_configure(
result["flow_id"],
{
"host": "1.1.1.1",
"port": 8080,
"token": "test-token",
},
)
assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result2["errors"] == {"base": "cannot_connect"}
async def test_form_unknown_exception(opp):
"""Test we handle unknown exceptions."""
result = await opp.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"openpeerpower.components.nuki.config_flow.NukiBridge.info",
side_effect=Exception,
):
result2 = await opp.config_entries.flow.async_configure(
result["flow_id"],
{
"host": "1.1.1.1",
"port": 8080,
"token": "test-token",
},
)
assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result2["errors"] == {"base": "unknown"}
async def test_form_already_configured(opp):
"""Test we get the form."""
await setup_nuki_integration(opp)
result = await opp.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"openpeerpower.components.nuki.config_flow.NukiBridge.info",
return_value=MOCK_INFO,
):
result2 = await opp.config_entries.flow.async_configure(
result["flow_id"],
{
"host": "1.1.1.1",
"port": 8080,
"token": "test-token",
},
)
assert result2["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result2["reason"] == "already_configured"
async def test_dhcp_flow(opp):
"""Test that DHCP discovery for new bridge works."""
result = await opp.config_entries.flow.async_init(
DOMAIN,
data={HOSTNAME: NAME, IP_ADDRESS: HOST, MAC_ADDRESS: MAC},
context={"source": config_entries.SOURCE_DHCP},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == config_entries.SOURCE_USER
with patch(
"openpeerpower.components.nuki.config_flow.NukiBridge.info",
return_value=MOCK_INFO,
), patch(
"openpeerpower.components.nuki.async_setup", return_value=True
) as mock_setup, patch(
"openpeerpower.components.nuki.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await opp.config_entries.flow.async_configure(
result["flow_id"],
{
"host": "1.1.1.1",
"port": 8080,
"token": "test-token",
},
)
assert result2["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result2["title"] == 123456789
assert result2["data"] == {
"host": "1.1.1.1",
"port": 8080,
"token": "test-token",
}
await opp.async_block_till_done()
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_dhcp_flow_already_configured(opp):
"""Test that DHCP doesn't setup already configured devices."""
await setup_nuki_integration(opp)
result = await opp.config_entries.flow.async_init(
DOMAIN,
data={HOSTNAME: NAME, IP_ADDRESS: HOST, MAC_ADDRESS: MAC},
context={"source": config_entries.SOURCE_DHCP},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_reauth_success(opp):
"""Test starting a reauthentication flow."""
entry = await setup_nuki_integration(opp)
result = await opp.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "reauth_confirm"
with patch(
"openpeerpower.components.nuki.config_flow.NukiBridge.info",
return_value=MOCK_INFO,
), patch("openpeerpower.components.nuki.async_setup", return_value=True), patch(
"openpeerpower.components.nuki.async_setup_entry",
return_value=True,
):
result2 = await opp.config_entries.flow.async_configure(
result["flow_id"],
user_input={CONF_TOKEN: "new-token"},
)
await opp.async_block_till_done()
assert result2["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result2["reason"] == "reauth_successful"
assert entry.data[CONF_TOKEN] == "new-token"
async def test_reauth_invalid_auth(opp):
"""Test starting a reauthentication flow with invalid auth."""
entry = await setup_nuki_integration(opp)
result = await opp.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "reauth_confirm"
with patch(
"openpeerpower.components.nuki.config_flow.NukiBridge.info",
side_effect=InvalidCredentialsException,
):
result2 = await opp.config_entries.flow.async_configure(
result["flow_id"],
user_input={CONF_TOKEN: "new-token"},
)
assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result2["step_id"] == "reauth_confirm"
assert result2["errors"] == {"base": "invalid_auth"}
async def test_reauth_cannot_connect(opp):
"""Test starting a reauthentication flow with cannot connect."""
entry = await setup_nuki_integration(opp)
result = await opp.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "reauth_confirm"
with patch(
"openpeerpower.components.nuki.config_flow.NukiBridge.info",
side_effect=RequestException,
):
result2 = await opp.config_entries.flow.async_configure(
result["flow_id"],
user_input={CONF_TOKEN: "new-token"},
)
assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result2["step_id"] == "reauth_confirm"
assert result2["errors"] == {"base": "cannot_connect"}
async def test_reauth_unknown_exception(opp):
"""Test starting a reauthentication flow with an unknown exception."""
entry = await setup_nuki_integration(opp)
result = await opp.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "reauth_confirm"
with patch(
"openpeerpower.components.nuki.config_flow.NukiBridge.info",
side_effect=Exception,
):
result2 = await opp.config_entries.flow.async_configure(
result["flow_id"],
user_input={CONF_TOKEN: "new-token"},
)
assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result2["step_id"] == "reauth_confirm"
assert result2["errors"] == {"base": "unknown"}
| 34.199396
| 84
| 0.639046
| 1,313
| 11,320
| 5.257426
| 0.080731
| 0.067797
| 0.044618
| 0.066927
| 0.862234
| 0.857888
| 0.843401
| 0.826018
| 0.802405
| 0.782124
| 0
| 0.017217
| 0.240636
| 11,320
| 330
| 85
| 34.30303
| 0.785831
| 0.002297
| 0
| 0.731801
| 0
| 0
| 0.183394
| 0.095518
| 0
| 0
| 0
| 0
| 0.187739
| 1
| 0
| false
| 0
| 0.038314
| 0
| 0.038314
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c49e2e563fcab31d6a67c9a110a1a44c135f76a0
| 79
|
py
|
Python
|
02/5 - Sort of Strangeness.py
|
Surferlul/csc-python-solutions
|
bea99e5e1e344d17fb2cb29d8bcbc6b108e24cee
|
[
"MIT"
] | null | null | null |
02/5 - Sort of Strangeness.py
|
Surferlul/csc-python-solutions
|
bea99e5e1e344d17fb2cb29d8bcbc6b108e24cee
|
[
"MIT"
] | null | null | null |
02/5 - Sort of Strangeness.py
|
Surferlul/csc-python-solutions
|
bea99e5e1e344d17fb2cb29d8bcbc6b108e24cee
|
[
"MIT"
] | null | null | null |
print(min(x, y, z))
print(x+y+z-min(x, y, z)-max(x, y, z))
print(max(x, y, z))
| 19.75
| 38
| 0.531646
| 22
| 79
| 1.909091
| 0.272727
| 0.238095
| 0.357143
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139241
| 79
| 3
| 39
| 26.333333
| 0.617647
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
c4b863cbfd60af28119aeb047325dc37944f335d
| 5,027
|
py
|
Python
|
tests/math/test_max.py
|
maki-nage/rxsci
|
64c9956752cbdd4c65aa9f054b6b28318a056625
|
[
"MIT"
] | 3
|
2021-05-03T13:40:46.000Z
|
2022-03-06T07:59:30.000Z
|
tests/math/test_max.py
|
maki-nage/rxsci
|
64c9956752cbdd4c65aa9f054b6b28318a056625
|
[
"MIT"
] | 9
|
2020-10-22T21:08:10.000Z
|
2021-08-05T09:01:26.000Z
|
tests/math/test_max.py
|
maki-nage/rxsci
|
64c9956752cbdd4c65aa9f054b6b28318a056625
|
[
"MIT"
] | 2
|
2021-01-05T16:48:54.000Z
|
2021-08-07T12:51:01.000Z
|
from pytest import approx
import rx
import rxsci as rs
def test_max_empty():
source = []
actual_result = []
rx.from_(source).pipe(
rs.math.max()
).subscribe(on_next=actual_result.append)
assert actual_result == [None]
def test_max_empty_reduce():
source = []
actual_result = []
rx.from_(source).pipe(
rs.math.max(reduce=True)
).subscribe(on_next=actual_result.append)
assert actual_result == [None]
def test_max_int():
source = [4, 10, 3, 2]
actual_result = []
rx.from_(source).pipe(
rs.math.max()
).subscribe(on_next=actual_result.append)
assert actual_result == [4, 10, 10, 10]
def test_max_int_reduce():
source = [4, 10, 3, 2]
expected_result = [10]
actual_result = []
rx.from_(source).pipe(
rs.math.max(reduce=True)
).subscribe(on_next=actual_result.append)
assert actual_result == expected_result
def test_max_float():
source = [2.76, 3, 10.43, 4]
expected_result = [10.43]
actual_result = []
rx.from_(source).pipe(
rs.math.max(reduce=True)
).subscribe(on_next=actual_result.append)
assert len(actual_result) == 1
assert actual_result[0] == expected_result[0]
def test_max_key_mapper():
source = [('a', 2), ('b', 3), ('c', 10), ('d', 4)]
expected_result = [10]
actual_result = []
rx.from_(source).pipe(
rs.math.max(lambda i: i[1], reduce=True)
).subscribe(
on_next=actual_result.append,
on_error=lambda e: print(e))
assert actual_result == expected_result
def test_max_mux():
source = [
rs.OnCreateMux((1 ,None)),
rs.OnNextMux((1, None), 4),
rs.OnCreateMux((2, None)),
rs.OnNextMux((2, None), 8),
rs.OnNextMux((2, None), 6),
rs.OnNextMux((1, None), 10),
rs.OnNextMux((1, None), 3),
rs.OnCompletedMux((1, None)),
rs.OnCompletedMux((2, None)),
]
actual_error = []
actual_completed = []
actual_result = []
def on_completed():
actual_completed.append(True)
rx.from_(source).pipe(
rs.cast_as_mux_observable(),
rs.state.with_memory_store(
rs.math.max(),
),
).subscribe(
on_next=actual_result.append,
on_completed=on_completed,
on_error=actual_error.append,
)
assert actual_error == []
assert actual_completed == [True]
try:
assert actual_result == [
rs.OnCreateMux((1, None)),
rs.OnNextMux((1, None), 4),
rs.OnCreateMux((2, None)),
rs.OnNextMux((2, None), 8),
rs.OnNextMux((2, None), 8),
rs.OnNextMux((1, None), 10),
rs.OnNextMux((1, None), 10),
rs.OnCompletedMux((1, None)),
rs.OnCompletedMux((2, None)),
]
except Exception as e:
import traceback
traceback.print_tb(e.__traceback__)
raise e
def test_max_mux_reduce():
source = [
rs.OnCreateMux((1 ,None)),
rs.OnNextMux((1, None), 4),
rs.OnCreateMux((2, None)),
rs.OnNextMux((2, None), 8),
rs.OnNextMux((2, None), 6),
rs.OnNextMux((1, None), 10),
rs.OnNextMux((1, None), 3),
rs.OnCompletedMux((1, None)),
rs.OnCompletedMux((2, None)),
]
actual_error = []
actual_completed = []
actual_result = []
def on_completed():
actual_completed.append(True)
rx.from_(source).pipe(
rs.cast_as_mux_observable(),
rs.state.with_memory_store(
rs.math.max(reduce=True),
),
).subscribe(
on_next=actual_result.append,
on_completed=on_completed,
on_error=actual_error.append,
)
assert actual_error == []
assert actual_completed == [True]
assert actual_result == [
rs.OnCreateMux((1 ,None)),
rs.OnCreateMux((2, None)),
rs.OnNextMux((1, None), 10),
rs.OnCompletedMux((1, None)),
rs.OnNextMux((2, None), 8),
rs.OnCompletedMux((2, None)),
]
def test_max_mux_empty_reduce():
source = [
rs.OnCreateMux((1 ,None)),
rs.OnCreateMux((2, None)),
rs.OnCompletedMux((1, None)),
rs.OnCompletedMux((2, None)),
]
actual_error = []
actual_completed = []
actual_result = []
def on_completed():
actual_completed.append(True)
rx.from_(source).pipe(
rs.cast_as_mux_observable(),
rs.state.with_memory_store(
rs.math.max(reduce=True),
),
).subscribe(
on_next=actual_result.append,
on_completed=on_completed,
on_error=actual_error.append,
)
assert actual_error == []
assert actual_completed == [True]
assert actual_result == [
rs.OnCreateMux((1 ,None)),
rs.OnCreateMux((2, None)),
rs.OnNextMux((1, None), None),
rs.OnCompletedMux((1, None)),
rs.OnNextMux((2, None), None),
rs.OnCompletedMux((2, None)),
]
| 24.642157
| 54
| 0.573304
| 612
| 5,027
| 4.501634
| 0.109477
| 0.12196
| 0.03049
| 0.063884
| 0.865699
| 0.848639
| 0.847187
| 0.839201
| 0.783303
| 0.747368
| 0
| 0.029736
| 0.277501
| 5,027
| 203
| 55
| 24.763547
| 0.7288
| 0
| 0
| 0.775758
| 0
| 0
| 0.000796
| 0
| 0
| 0
| 0
| 0
| 0.09697
| 1
| 0.072727
| false
| 0
| 0.024242
| 0
| 0.09697
| 0.012121
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f216ba3d9739e4103b8a0f8821315c3a19eecff9
| 1,081
|
py
|
Python
|
pynitrokey/start/rsa_pub_key.py
|
fayrlight/pynitrokey
|
c6a93da7a811d34213746b60fab22affb3616a88
|
[
"Apache-2.0",
"MIT"
] | 15
|
2020-08-05T14:37:37.000Z
|
2022-02-20T13:47:41.000Z
|
pynitrokey/start/rsa_pub_key.py
|
fayrlight/pynitrokey
|
c6a93da7a811d34213746b60fab22affb3616a88
|
[
"Apache-2.0",
"MIT"
] | 153
|
2020-06-22T13:09:41.000Z
|
2022-03-31T10:25:14.000Z
|
pynitrokey/start/rsa_pub_key.py
|
fayrlight/pynitrokey
|
c6a93da7a811d34213746b60fab22affb3616a88
|
[
"Apache-2.0",
"MIT"
] | 4
|
2021-04-06T07:08:59.000Z
|
2022-02-14T14:26:38.000Z
|
rsa_key_data = [
"9cf7192b51a574d1ad3ccb08ba09b87f228573893eee355529ff243e90fd4b86f79a82097cc7922c0485bed1616b1656a9b0b19ef78ea8ec34c384019adc5d5bf4db2d2a0a2d9cf14277bdcb7056f48b81214e3f7f7742231e29673966f9b1106862112cc798dba8d4a138bb5abfc6d4c12d53a5d39b2f783da916da20852ee139bbafda61d429caf2a4f30847ce7e7ae32ab4061e27dd9e4d00d60910249db8d8559dd85f7ca59659ef400c8f6318700f4e97f0c6f4165de80641490433c88da8682befe68eb311f54af2b07d97ac74edb5399cf054764211694fbb8d1d333f3269f235abe025067f811ff83a2224826219b309ea3e6c968f42b3e52f245dc9",
"010001",
"b5ab7b159220b18e363258f61ebde08bae83d6ce2dbfe4adc143628c527887acde9de09bf9b49f438019004d71855f30c2d69b6c29bb9882ab641b3387409fe9199464a7faa4b5230c56d9e17cd9ed074bc00180ebed62bae3af28e6ff2ac2654ad968834c5d5c88f8d9d3cc5e167b10453b049d4e454a5761fb0ac717185907",
"dd2fffa9814296156a6926cd17b65564187e424dcadce9b032246ad7e46448bb0f9e0ff3c64f987424b1a40bc694e2e9ac4fb1930d163582d7acf20653a1c44b97846c1c5fd8a7b19bb225fb39c30e25410483deaf8c2538d222b748c4d8103b11cec04f666a5c0dbcbf5d5f625f158f65746c3fafe6418145f7cffa5fadeeaf"
]
| 135.125
| 519
| 0.963922
| 7
| 1,081
| 148.571429
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.614583
| 0.023127
| 1,081
| 7
| 520
| 154.428571
| 0.370265
| 0
| 0
| 0
| 0
| 0
| 0.953704
| 0.948148
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
482106af5be38929a1554705a7a6118033090043
| 810
|
py
|
Python
|
nfv/nfv-common/nfv_common/event_log/objects/v1/__init__.py
|
SidneyAn/nfv
|
5f0262a5b6ea4be59f977b9c587c483cbe0e373d
|
[
"Apache-2.0"
] | 2
|
2020-02-07T19:01:36.000Z
|
2022-02-23T01:41:46.000Z
|
nfv/nfv-common/nfv_common/event_log/objects/v1/__init__.py
|
SidneyAn/nfv
|
5f0262a5b6ea4be59f977b9c587c483cbe0e373d
|
[
"Apache-2.0"
] | 1
|
2021-01-14T12:02:25.000Z
|
2021-01-14T12:02:25.000Z
|
nfv/nfv-common/nfv_common/event_log/objects/v1/__init__.py
|
SidneyAn/nfv
|
5f0262a5b6ea4be59f977b9c587c483cbe0e373d
|
[
"Apache-2.0"
] | 2
|
2021-01-13T08:39:21.000Z
|
2022-02-09T00:21:55.000Z
|
#
# Copyright (c) 2015-2016 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
from nfv_common.event_log.objects.v1._event_log_data import EventLogData # noqa: F401
from nfv_common.event_log.objects.v1._event_log_data import EventLogStateData # noqa: F401
from nfv_common.event_log.objects.v1._event_log_data import EventLogThresholdData # noqa: F401
from nfv_common.event_log.objects.v1._event_log_defs import EVENT_CONTEXT # noqa: F401
from nfv_common.event_log.objects.v1._event_log_defs import EVENT_ID # noqa: F401
from nfv_common.event_log.objects.v1._event_log_defs import EVENT_IMPORTANCE # noqa: F401
from nfv_common.event_log.objects.v1._event_log_defs import EVENT_INITIATED_BY # noqa: F401
from nfv_common.event_log.objects.v1._event_log_defs import EVENT_TYPE # noqa: F401
| 57.857143
| 95
| 0.825926
| 132
| 810
| 4.719697
| 0.265152
| 0.205457
| 0.166934
| 0.23114
| 0.746388
| 0.746388
| 0.746388
| 0.746388
| 0.746388
| 0.746388
| 0
| 0.057377
| 0.096296
| 810
| 13
| 96
| 62.307692
| 0.793716
| 0.212346
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
485643590eb92f2042359fe887bdbdd996a3a53e
| 145
|
py
|
Python
|
src/models/__init__.py
|
hlydecker/Iterative-Human-and-Automated-Identification-of-Wildlife-Images
|
018dbb24dc42fa7dd1d921fef944802a06b22069
|
[
"BSD-3-Clause"
] | 8
|
2021-10-21T23:51:47.000Z
|
2022-03-14T22:23:13.000Z
|
src/models/__init__.py
|
hlydecker/Iterative-Human-and-Automated-Identification-of-Wildlife-Images
|
018dbb24dc42fa7dd1d921fef944802a06b22069
|
[
"BSD-3-Clause"
] | null | null | null |
src/models/__init__.py
|
hlydecker/Iterative-Human-and-Automated-Identification-of-Wildlife-Images
|
018dbb24dc42fa7dd1d921fef944802a06b22069
|
[
"BSD-3-Clause"
] | 1
|
2021-12-02T04:38:12.000Z
|
2021-12-02T04:38:12.000Z
|
from . import utils
from . import plain_resnet
from . import memory_resnet
from . import memory_resnet_soft_iter
from . import plain_semi_resnet
| 24.166667
| 37
| 0.827586
| 22
| 145
| 5.136364
| 0.409091
| 0.442478
| 0.265487
| 0.389381
| 0.442478
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 145
| 5
| 38
| 29
| 0.904
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
486db2763a82f12bbad8363cad90ed0dd73c0394
| 4,452
|
py
|
Python
|
tests/unit/plugins/kubernetes/kube_client_test.py
|
Yelp/task_processing
|
73221a5c5300bb46efbbd0aa6a3ae46c82ff6a79
|
[
"Apache-2.0"
] | 27
|
2017-05-17T03:07:14.000Z
|
2022-01-26T05:53:50.000Z
|
tests/unit/plugins/kubernetes/kube_client_test.py
|
Yelp/task_processing
|
73221a5c5300bb46efbbd0aa6a3ae46c82ff6a79
|
[
"Apache-2.0"
] | 64
|
2017-05-13T00:30:14.000Z
|
2021-09-03T21:42:06.000Z
|
tests/unit/plugins/kubernetes/kube_client_test.py
|
Yelp/task_processing
|
73221a5c5300bb46efbbd0aa6a3ae46c82ff6a79
|
[
"Apache-2.0"
] | 6
|
2017-08-22T06:08:06.000Z
|
2021-05-27T00:07:51.000Z
|
import os
from unittest import mock
import pytest
from kubernetes.client.exceptions import ApiException
from task_processing.plugins.kubernetes.kube_client import ExceededMaxAttempts
from task_processing.plugins.kubernetes.kube_client import KubeClient
def test_KubeClient_no_kubeconfig():
with mock.patch(
"task_processing.plugins.kubernetes.kube_client.kube_config.load_kube_config",
autospec=True
), mock.patch(
"task_processing.plugins.kubernetes.kube_client.kube_client",
autospec=True
), pytest.raises(ValueError):
KubeClient()
def test_KubeClient_kubeconfig_init():
with mock.patch(
"task_processing.plugins.kubernetes.kube_client.kube_config.load_kube_config",
autospec=True
), mock.patch(
"task_processing.plugins.kubernetes.kube_client.kube_client",
autospec=True
) as mock_kube_client:
client = KubeClient(kubeconfig_path="/some/kube/config.conf")
assert client.core == mock_kube_client.CoreV1Api()
def test_KubeClient_kubeconfig_env_var():
with mock.patch(
"task_processing.plugins.kubernetes.kube_client.kube_config.load_kube_config",
autospec=True
), mock.patch(
"task_processing.plugins.kubernetes.kube_client.kube_client",
autospec=True
) as mock_kube_client, mock.patch.dict(os.environ, {"KUBECONFIG": "/another/kube/config.conf"}):
client = KubeClient()
assert client.core == mock_kube_client.CoreV1Api()
def test_KubeClient_kubeconfig_init_overrides_env_var():
with mock.patch(
"task_processing.plugins.kubernetes.kube_client.kube_config.load_kube_config",
autospec=True
) as mock_load_config, mock.patch(
"task_processing.plugins.kubernetes.kube_client.kube_client",
autospec=True
) as mock_kube_client, mock.patch.dict(os.environ, {"KUBECONFIG": "/another/kube/config.conf"}):
mock_config_path = "/OVERRIDE.conf"
client = KubeClient(kubeconfig_path=mock_config_path)
assert client.core == mock_kube_client.CoreV1Api()
mock_load_config.assert_called_once_with(config_file=mock_config_path, context=None)
def test_KubeClient_get_pod_too_many_failures():
with mock.patch(
"task_processing.plugins.kubernetes.kube_client.kube_config.load_kube_config",
autospec=True
), mock.patch(
"task_processing.plugins.kubernetes.kube_client.kube_client",
autospec=True
) as mock_kube_client, mock.patch.dict(
os.environ, {"KUBECONFIG": "/another/kube/config.conf"}
), pytest.raises(ExceededMaxAttempts):
mock_config_path = "/OVERRIDE.conf"
mock_kube_client.CoreV1Api().read_namespaced_pod.side_effect = [ApiException, ApiException]
client = KubeClient(kubeconfig_path=mock_config_path)
client.get_pod(namespace='ns', pod_name='pod-name', attempts=2)
assert mock_kube_client.CoreV1Api().read_namespaced_pod.call_count == 2
def test_KubeClient_get_pod_unknown_exception():
with mock.patch(
"task_processing.plugins.kubernetes.kube_client.kube_config.load_kube_config",
autospec=True
), mock.patch(
"task_processing.plugins.kubernetes.kube_client.kube_client",
autospec=True
) as mock_kube_client, mock.patch.dict(
os.environ, {"KUBECONFIG": "/another/kube/config.conf"}
), pytest.raises(Exception):
mock_config_path = "/OVERRIDE.conf"
mock_kube_client.CoreV1Api().read_namespaced_pod.side_effect = [Exception]
client = KubeClient(kubeconfig_path=mock_config_path)
client.get_pod(namespace='ns', pod_name='pod-name', attempts=2)
def test_KubeClient_get_pod():
with mock.patch(
"task_processing.plugins.kubernetes.kube_client.kube_config.load_kube_config",
autospec=True
), mock.patch(
"task_processing.plugins.kubernetes.kube_client.kube_client",
autospec=True
) as mock_kube_client, mock.patch.dict(
os.environ, {"KUBECONFIG": "/another/kube/config.conf"}
):
mock_config_path = "/OVERRIDE.conf"
mock_kube_client.CoreV1Api().read_namespaced_pod.return_value = mock.Mock()
client = KubeClient(kubeconfig_path=mock_config_path)
client.get_pod(namespace='ns', pod_name='pod-name', attempts=1)
mock_kube_client.CoreV1Api().read_namespaced_pod.assert_called_once_with(
namespace='ns', name='pod-name'
)
| 39.052632
| 100
| 0.724394
| 545
| 4,452
| 5.601835
| 0.126606
| 0.121192
| 0.110056
| 0.162463
| 0.833934
| 0.814936
| 0.799541
| 0.746151
| 0.712742
| 0.712742
| 0
| 0.003249
| 0.170485
| 4,452
| 113
| 101
| 39.39823
| 0.82345
| 0
| 0
| 0.666667
| 0
| 0
| 0.274933
| 0.242138
| 0
| 0
| 0
| 0
| 0.064516
| 1
| 0.075269
| false
| 0
| 0.064516
| 0
| 0.139785
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6fa91a8008697b5f360825648e06c0242cc46cf1
| 14,740
|
py
|
Python
|
ironic_tempest_plugin/tests/scenario/ironic_standalone/test_basic_ops.py
|
ameya-r/ironic-tempest-plugin
|
d3360cf3b6ad8b89b9c80fc806dc5d4ba373dd01
|
[
"Apache-2.0"
] | 9
|
2016-11-20T08:00:27.000Z
|
2019-01-28T22:03:31.000Z
|
ironic_tempest_plugin/tests/scenario/ironic_standalone/test_basic_ops.py
|
ameya-r/ironic-tempest-plugin
|
d3360cf3b6ad8b89b9c80fc806dc5d4ba373dd01
|
[
"Apache-2.0"
] | 2
|
2018-12-07T11:14:14.000Z
|
2022-01-19T10:25:28.000Z
|
ironic_tempest_plugin/tests/scenario/ironic_standalone/test_basic_ops.py
|
ameya-r/ironic-tempest-plugin
|
d3360cf3b6ad8b89b9c80fc806dc5d4ba373dd01
|
[
"Apache-2.0"
] | 7
|
2017-12-11T18:07:47.000Z
|
2021-10-21T05:07:02.000Z
|
#
# Copyright 2017 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.common import utils
from tempest import config
from tempest.lib import decorators
from ironic_tempest_plugin.tests.scenario import \
baremetal_standalone_manager as bsm
CONF = config.CONF
class BaremetalAgentIpmitoolWholedisk(bsm.BaremetalStandaloneScenarioTest):
driver = 'agent_ipmitool'
image_ref = CONF.baremetal.whole_disk_image_ref
wholedisk_image = True
@decorators.idempotent_id('defff515-a6ff-44f6-9d8d-2ded51196d98')
@utils.services('image', 'network', 'object_storage')
def test_ip_access_to_server(self):
self.boot_and_verify_node()
class BaremetalAgentIpmitoolWholediskHttpLink(
bsm.BaremetalStandaloneScenarioTest):
driver = 'agent_ipmitool'
image_ref = CONF.baremetal.whole_disk_image_url
image_checksum = CONF.baremetal.whole_disk_image_checksum
wholedisk_image = True
@classmethod
def skip_checks(cls):
super(BaremetalAgentIpmitoolWholediskHttpLink, cls).skip_checks()
if not CONF.baremetal_feature_enabled.ipxe_enabled:
skip_msg = ("HTTP server is not available when ipxe is disabled.")
raise cls.skipException(skip_msg)
@decorators.idempotent_id('d926c683-1a32-44df-afd0-e60134346fd0')
@utils.services('network')
def test_ip_access_to_server(self):
self.boot_and_verify_node()
class BaremetalAgentIpmitoolPartitioned(bsm.BaremetalStandaloneScenarioTest):
driver = 'agent_ipmitool'
image_ref = CONF.baremetal.partition_image_ref
wholedisk_image = False
@decorators.idempotent_id('27b86130-d8dc-419d-880a-fbbbe4ce3f8c')
@utils.services('image', 'network', 'object_storage')
def test_ip_access_to_server(self):
self.boot_and_verify_node()
class BaremetalPxeIpmitoolWholedisk(bsm.BaremetalStandaloneScenarioTest):
driver = 'pxe_ipmitool'
image_ref = CONF.baremetal.whole_disk_image_ref
wholedisk_image = True
@decorators.idempotent_id('d8c5badd-45db-4d05-bbe8-35babbed6e86')
@utils.services('image', 'network')
def test_ip_access_to_server(self):
self.boot_and_verify_node()
class BaremetalPxeIpmitoolWholediskHttpLink(
bsm.BaremetalStandaloneScenarioTest):
driver = 'pxe_ipmitool'
image_ref = CONF.baremetal.whole_disk_image_url
image_checksum = CONF.baremetal.whole_disk_image_checksum
wholedisk_image = True
@classmethod
def skip_checks(cls):
super(BaremetalPxeIpmitoolWholediskHttpLink, cls).skip_checks()
if not CONF.baremetal_feature_enabled.ipxe_enabled:
skip_msg = ("HTTP server is not available when ipxe is disabled.")
raise cls.skipException(skip_msg)
@decorators.idempotent_id('71ccf06f-6765-40fd-8252-1b1bfa423b9b')
@utils.services('network')
def test_ip_access_to_server(self):
self.boot_and_verify_node()
class BaremetalPxeIpmitoolPartitioned(bsm.BaremetalStandaloneScenarioTest):
driver = 'pxe_ipmitool'
image_ref = CONF.baremetal.partition_image_ref
wholedisk_image = False
@decorators.idempotent_id('ea85e19c-6869-4577-b9bb-2eb150f77c90')
@utils.services('image', 'network')
def test_ip_access_to_server(self):
self.boot_and_verify_node()
class BaremetalDriverIscsiWholedisk(bsm.BaremetalStandaloneScenarioTest):
api_microversion = '1.31' # to set the deploy_interface
if 'redfish' in CONF.baremetal.enabled_hardware_types:
driver = 'redfish'
else:
driver = 'ipmi'
deploy_interface = 'iscsi'
image_ref = CONF.baremetal.whole_disk_image_ref
wholedisk_image = True
@classmethod
def skip_checks(cls):
super(BaremetalDriverIscsiWholedisk, cls).skip_checks()
if cls.driver == 'redfish':
skip_msg = ("Test covered when using ipmi")
raise cls.skipException(skip_msg)
@decorators.idempotent_id('f25b71df-2150-45d7-a780-7f5b07124808')
@utils.services('image', 'network')
def test_ip_access_to_server(self):
self.boot_and_verify_node()
class BaremetalDriverDirectWholedisk(bsm.BaremetalStandaloneScenarioTest):
api_microversion = '1.31' # to set the deploy_interface
if 'redfish' in CONF.baremetal.enabled_hardware_types:
driver = 'redfish'
else:
driver = 'ipmi'
deploy_interface = 'direct'
image_ref = CONF.baremetal.whole_disk_image_ref
wholedisk_image = True
@classmethod
def skip_checks(cls):
super(BaremetalDriverDirectWholedisk, cls).skip_checks()
if cls.driver == 'ipmi':
skip_msg = ("Test covered when using redfish")
raise cls.skipException(skip_msg)
@decorators.idempotent_id('c2db24e7-07dc-4a20-8f93-d4efae2bfd4e')
@utils.services('image', 'network')
def test_ip_access_to_server(self):
self.boot_and_verify_node()
class BaremetalDriverIscsiPartitioned(bsm.BaremetalStandaloneScenarioTest):
api_microversion = '1.31' # to set the deploy_interface
if 'redfish' in CONF.baremetal.enabled_hardware_types:
driver = 'redfish'
else:
driver = 'ipmi'
deploy_interface = 'iscsi'
image_ref = CONF.baremetal.partition_image_ref
wholedisk_image = False
boot_option = 'netboot' if CONF.baremetal.partition_netboot else 'local'
@classmethod
def skip_checks(cls):
super(BaremetalDriverIscsiPartitioned, cls).skip_checks()
if cls.driver == 'ipmi':
skip_msg = ("Test covered when using redfish")
raise cls.skipException(skip_msg)
@decorators.idempotent_id('7d0b205e-edbc-4e2d-9f6d-95cd74eefecb')
@utils.services('image', 'network')
def test_ip_access_to_server(self):
self.boot_and_verify_node()
class BaremetalDriverDirectPartitioned(bsm.BaremetalStandaloneScenarioTest):
api_microversion = '1.31' # to set the deploy_interface
if 'redfish' in CONF.baremetal.enabled_hardware_types:
driver = 'redfish'
else:
driver = 'ipmi'
deploy_interface = 'direct'
image_ref = CONF.baremetal.partition_image_ref
wholedisk_image = False
boot_option = 'netboot' if CONF.baremetal.partition_netboot else 'local'
@classmethod
def skip_checks(cls):
super(BaremetalDriverDirectPartitioned, cls).skip_checks()
if cls.driver == 'redfish':
skip_msg = ("Test covered when using ipmi")
raise cls.skipException(skip_msg)
@decorators.idempotent_id('7b4b2dcd-2bbb-44f5-991f-0964300af6b7')
@utils.services('image', 'network')
def test_ip_access_to_server(self):
self.boot_and_verify_node()
class BaremetalDriverAnsibleWholedisk(bsm.BaremetalStandaloneScenarioTest):
api_microversion = '1.31' # to set the deploy_interface
if 'redfish' in CONF.baremetal.enabled_hardware_types:
driver = 'redfish'
else:
driver = 'ipmi'
deploy_interface = 'ansible'
image_ref = CONF.baremetal.whole_disk_image_ref
wholedisk_image = True
@decorators.idempotent_id('cde532cc-81ba-4489-b374-b4a85cc203eb')
@utils.services('image', 'network')
def test_ip_access_to_server(self):
self.boot_and_verify_node()
class BaremetalIpmiRescueWholedisk(bsm.BaremetalStandaloneScenarioTest):
api_microversion = '1.38'
min_microversion = '1.38'
driver = 'ipmi'
rescue_interface = 'agent'
image_ref = CONF.baremetal.whole_disk_image_ref
wholedisk_image = True
# NOTE(tiendc) Using direct deploy interface and a whole disk
# image may lead to the bug:
# https://bugs.launchpad.net/ironic/+bug/1750958
# This is a workaround by using iscsi deploy interface.
deploy_interface = 'iscsi'
@decorators.idempotent_id('d6a1780f-c4bb-4136-8144-29e822e14d66')
@utils.services('image', 'network')
def test_rescue_mode(self):
self.set_node_to_active(self.image_ref)
self.rescue_unrescue()
class BaremetalIpmiRescuePartitioned(bsm.BaremetalStandaloneScenarioTest):
api_microversion = '1.38'
min_microversion = '1.38'
driver = 'ipmi'
rescue_interface = 'agent'
image_ref = CONF.baremetal.partition_image_ref
wholedisk_image = False
boot_option = 'netboot' if CONF.baremetal.partition_netboot else 'local'
# NOTE(jroll) the ansible deploy interface doesn't support partition images
# with netboot mode. Since that's what is happening here, explicitly choose
# a deploy interface to be sure we don't end up with a node using the
# ansible interface here.
deploy_interface = 'iscsi'
@decorators.idempotent_id('113acd0a-9872-4631-b3ee-54da7e3bb262')
@utils.services('image', 'network')
def test_rescue_mode(self):
self.set_node_to_active(self.image_ref)
self.rescue_unrescue()
class BaremetalIloDirectWholediskHttpLink(
bsm.BaremetalStandaloneScenarioTest):
api_microversion = '1.31' # to set the deploy_interface
driver = 'ilo'
deploy_interface = 'direct'
boot_interface = 'ilo-virtual-media'
image_ref = CONF.baremetal.whole_disk_image_url
image_checksum = CONF.baremetal.whole_disk_image_checksum
wholedisk_image = True
@decorators.idempotent_id('c2db24e7-b9bb-44df-6765-e60134346fd0')
@utils.services('network')
def test_ip_access_to_server(self):
self.boot_and_verify_node()
class BaremetalIloDirectPartitioned(bsm.BaremetalStandaloneScenarioTest):
api_microversion = '1.31' # to set the deploy_interface
driver = 'ilo'
deploy_interface = 'direct'
boot_interface = 'ilo-virtual-media'
image_ref = CONF.baremetal.partition_image_ref
wholedisk_image = False
@decorators.idempotent_id('ea85e19c-d8dc-4577-4d05-fbbbe4ce3f8c')
@utils.services('image', 'network')
def test_ip_access_to_server(self):
self.boot_and_verify_node()
class BaremetalIloIscsiWholediskHttpLink(
bsm.BaremetalStandaloneScenarioTest):
api_microversion = '1.31' # to set the deploy_interface
driver = 'ilo'
deploy_interface = 'iscsi'
boot_interface = 'ilo-virtual-media'
image_ref = CONF.baremetal.whole_disk_image_url
image_checksum = CONF.baremetal.whole_disk_image_checksum
wholedisk_image = True
@decorators.idempotent_id('71ccf06f-45db-8f93-afd0-d4efae2bfd4e')
@utils.services('network')
def test_ip_access_to_server(self):
self.boot_and_verify_node()
class BaremetalIloIscsiPartitioned(bsm.BaremetalStandaloneScenarioTest):
api_microversion = '1.31' # to set the deploy_interface
driver = 'ilo'
deploy_interface = 'iscsi'
boot_interface = 'ilo-virtual-media'
image_ref = CONF.baremetal.partition_image_ref
wholedisk_image = False
@decorators.idempotent_id('d926c683-4d05-8252-b9bb-35babbed6e86')
@utils.services('image', 'network')
def test_ip_access_to_server(self):
self.boot_and_verify_node()
class BaremetalIloPxeWholediskHttpLink(
bsm.BaremetalStandaloneScenarioTest):
api_microversion = '1.31' # to set the deploy_interface
driver = 'ilo'
deploy_interface = 'direct'
boot_interface = 'ilo-pxe'
image_ref = CONF.baremetal.whole_disk_image_url
image_checksum = CONF.baremetal.whole_disk_image_checksum
wholedisk_image = True
@decorators.idempotent_id('d926c683-1a32-edbc-07dc-95cd74eefecb')
@utils.services('network')
def test_ip_access_to_server(self):
self.boot_and_verify_node()
class BaremetalIloPxePartitioned(bsm.BaremetalStandaloneScenarioTest):
api_microversion = '1.31' # to set the deploy_interface
driver = 'ilo'
deploy_interface = 'direct'
boot_interface = 'ilo-pxe'
image_ref = CONF.baremetal.partition_image_ref
wholedisk_image = False
@decorators.idempotent_id('71ccf06f-07dc-4577-6869-1b1bfa423b9b')
@utils.services('image', 'network')
def test_ip_access_to_server(self):
self.boot_and_verify_node()
class BaremetalIloIPxeWholediskHttpLink(
bsm.BaremetalStandaloneScenarioTest):
api_microversion = '1.31' # to set the deploy_interface
driver = 'ilo'
deploy_interface = 'direct'
boot_interface = 'ilo-ipxe'
image_ref = CONF.baremetal.whole_disk_image_url
image_checksum = CONF.baremetal.whole_disk_image_checksum
wholedisk_image = True
@decorators.idempotent_id('d926c683-1a32-edbc-07dc-95cd74eefecb')
@utils.services('network')
def test_ip_access_to_server(self):
self.boot_and_verify_node()
class BaremetalIlo5UefiHTTPSWholediskHttpsLink(
bsm.BaremetalStandaloneScenarioTest):
api_microversion = '1.31' # to set the deploy_interface
driver = 'ilo5'
deploy_interface = 'direct'
boot_interface = 'ilo-uefi-https'
image_ref = CONF.baremetal.whole_disk_image_url
image_checksum = CONF.baremetal.whole_disk_image_checksum
wholedisk_image = True
@decorators.idempotent_id('d926c683-1a32-edbc-07dc-95cd74eefecb')
@utils.services('network')
def test_ip_access_to_server(self):
self.boot_and_verify_node()
class BaremetalRedfishDirectWholediskHttpLink(
bsm.BaremetalStandaloneScenarioTest):
api_microversion = '1.31' # to set the deploy_interface
driver = 'redfish'
deploy_interface = 'direct'
boot_interface = 'redfish-virtual-media'
image_ref = CONF.baremetal.whole_disk_image_url
image_checksum = CONF.baremetal.whole_disk_image_checksum
wholedisk_image = True
@decorators.idempotent_id('113acd0a-9872-4631-b3ee-54da7e3bb262')
@utils.services('network')
def test_ip_access_to_server(self):
self.boot_and_verify_node()
class BaremetalRedfishIPxeWholediskHttpLink(
bsm.BaremetalStandaloneScenarioTest):
api_microversion = '1.31' # to set the deploy_interface
driver = 'redfish'
deploy_interface = 'direct'
boot_interface = 'ipxe'
image_ref = CONF.baremetal.whole_disk_image_url
image_checksum = CONF.baremetal.whole_disk_image_checksum
wholedisk_image = True
@decorators.idempotent_id('113acd0a-9872-4631-b3ee-54da7e3bb262')
@utils.services('network')
def test_ip_access_to_server(self):
self.boot_and_verify_node()
| 33.57631
| 79
| 0.733175
| 1,719
| 14,740
| 6.026178
| 0.159395
| 0.052708
| 0.033787
| 0.05097
| 0.771213
| 0.767738
| 0.758181
| 0.758181
| 0.758181
| 0.749686
| 0
| 0.040172
| 0.177544
| 14,740
| 438
| 80
| 33.652968
| 0.81432
| 0.094912
| 0
| 0.814103
| 0
| 0
| 0.142224
| 0.06382
| 0
| 0
| 0
| 0
| 0
| 1
| 0.092949
| false
| 0
| 0.012821
| 0
| 0.576923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
6ff6f48d064f3a947dea8f26769789fe8422c137
| 2,909
|
py
|
Python
|
tests/acceptance/test_MainClient_resourceIterator.py
|
rmetcalf9/EllucainEthosPythonClient
|
6913322b1e583f655f67399f2baa763833583c27
|
[
"MIT"
] | 1
|
2021-02-09T22:05:50.000Z
|
2021-02-09T22:05:50.000Z
|
tests/acceptance/test_MainClient_resourceIterator.py
|
rmetcalf9/EllucainEthosPythonClient
|
6913322b1e583f655f67399f2baa763833583c27
|
[
"MIT"
] | 1
|
2020-07-02T11:44:54.000Z
|
2020-07-02T11:45:38.000Z
|
tests/acceptance/test_MainClient_resourceIterator.py
|
rmetcalf9/EllucainEthosPythonClient
|
6913322b1e583f655f67399f2baa763833583c27
|
[
"MIT"
] | 1
|
2021-01-13T21:35:11.000Z
|
2021-01-13T21:35:11.000Z
|
# Tests on main client object
import TestHelperSuperClass
import EllucianEthosPythonClient
import base64
import json
import TestingHelper
import queue
class helpers(TestHelperSuperClass.testClassWithHelpers):
pass
@TestHelperSuperClass.wipd
class test_MainClient_resourceIterator(helpers):
    """Tests for MainClient.getResourceIterator paging.

    Both tests exercise the same two-page scenario (a full page of 5 people,
    then an empty page that terminates iteration); they differ only in which
    response header carries the media type. The shared logic lives in
    _run_pagination_test so the two variants cannot drift apart.
    """

    def _run_pagination_test(self, mediaTypeHeaderName):
        """Register a 5-item page and an empty terminator page, then assert
        the iterator yields exactly 5 items.

        mediaTypeHeaderName -- name of the response header holding
            "application/vnd.hedtech.integration.v6+json"
        """
        personGUID = "personGUID"
        singlePersonResponseDict, _, _ = TestingHelper.getPersonMockResult(personGUID=personGUID, version="6")
        data = None
        # Five references to the same mock dict, as in a real repeated payload.
        mockResponse = [singlePersonResponseDict] * 5
        # Registration order preserved from the original tests: the empty
        # second page (offset=5) is registered before the first page.
        self.ethosClient.mock.registerNextResponse(
            reqFnName="get",
            url="/api/persons?limit=9&offset=5",
            data=data,
            status_code=200,
            contentBytes=base64.b64encode(json.dumps([]).encode()),
            contentHeaders={
                mediaTypeHeaderName: "application/vnd.hedtech.integration.v6+json"
            },
            ignoreData=False
        )
        self.ethosClient.mock.registerNextResponse(
            reqFnName="get",
            url="/api/persons?limit=9&offset=0",
            data=data,
            status_code=200,
            contentBytes=base64.b64encode(json.dumps(mockResponse).encode()),
            contentHeaders={
                mediaTypeHeaderName: "application/vnd.hedtech.integration.v6+json"
            },
            ignoreData=False
        )
        personIterator = self.ethosClient.getResourceIterator(
            loginSession=None,
            resourceName="persons",
            version=None,
            pageSize=9
        )
        cur = 0
        for curPerson in personIterator:
            cur += 1
        self.assertEqual(cur, 5)

    def test_singlePage(self):
        """Iterator pages through results using the x-hedtech-media-type header."""
        self._run_pagination_test("x-hedtech-media-type")

    def test_responseWithXMediaTypeHeaderVersionHeader(self):
        """Same scenario, but the server responds with the x-media-type header."""
        self._run_pagination_test("x-media-type")
| 29.683673
| 147
| 0.704366
| 267
| 2,909
| 7.629213
| 0.277154
| 0.188513
| 0.212077
| 0.188513
| 0.843397
| 0.843397
| 0.843397
| 0.843397
| 0.843397
| 0.843397
| 0
| 0.022006
| 0.187693
| 2,909
| 97
| 148
| 29.989691
| 0.840034
| 0.009282
| 0
| 0.738095
| 0
| 0
| 0.138889
| 0.1
| 0
| 0
| 0
| 0
| 0.02381
| 1
| 0.02381
| false
| 0.011905
| 0.071429
| 0
| 0.119048
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
6fff9444572ab0e25376411f6cbd950472efc556
| 155
|
py
|
Python
|
turterra/__init__.py
|
BTheDragonMaster/turterra
|
2498397e930fb1e384fb39a2e5ef4b2c42636ce7
|
[
"MIT"
] | null | null | null |
turterra/__init__.py
|
BTheDragonMaster/turterra
|
2498397e930fb1e384fb39a2e5ef4b2c42636ce7
|
[
"MIT"
] | null | null | null |
turterra/__init__.py
|
BTheDragonMaster/turterra
|
2498397e930fb1e384fb39a2e5ef4b2c42636ce7
|
[
"MIT"
] | null | null | null |
from turterra import app_callbacks, app_layout
from turterra.utils.compound_utility import Compound

# Explicit public API of the turterra package.
__all__ = ["app_callbacks", "app_layout", "Compound"]
| 31
| 53
| 0.812903
| 20
| 155
| 5.85
| 0.5
| 0.205128
| 0.25641
| 0.358974
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096774
| 155
| 4
| 54
| 38.75
| 0.835714
| 0
| 0
| 0
| 0
| 0
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d2309343fcb926bc42c2d069794d54825a4938ae
| 4,376
|
py
|
Python
|
tests/krs/test_apps.py
|
WIPACrepo/keycloak-rest-services
|
2661b0db2dd320bdb8eefc62c805188bec52ecc7
|
[
"MIT"
] | 1
|
2021-09-23T14:39:36.000Z
|
2021-09-23T14:39:36.000Z
|
tests/krs/test_apps.py
|
WIPACrepo/keycloak-rest-services
|
2661b0db2dd320bdb8eefc62c805188bec52ecc7
|
[
"MIT"
] | 38
|
2020-08-31T22:53:09.000Z
|
2022-03-28T20:55:39.000Z
|
tests/krs/test_apps.py
|
WIPACrepo/keycloak-rest-services
|
2661b0db2dd320bdb8eefc62c805188bec52ecc7
|
[
"MIT"
] | null | null | null |
import os
import pytest
from krs.token import get_token
from krs import users,groups,apps
from ..util import keycloak_bootstrap
@pytest.mark.asyncio
async def test_list_apps_empty(keycloak_bootstrap):
    """A freshly bootstrapped realm reports no registered apps."""
    ret = await apps.list_apps(rest_client=keycloak_bootstrap)
    assert ret == {}
@pytest.mark.asyncio
async def test_list_apps(keycloak_bootstrap):
    """A created app shows up in list_apps, keyed by client id, with its rootUrl."""
    await apps.create_app('testapp', 'http://url', rest_client=keycloak_bootstrap)
    ret = await apps.list_apps(rest_client=keycloak_bootstrap)
    assert list(ret.keys()) == ['testapp']
    assert ret['testapp']['rootUrl'] == 'http://url'
@pytest.mark.asyncio
async def test_app_info(keycloak_bootstrap):
    """app_info exposes client id, root url, a client secret, and the
    default 'read'/'write' roles for a newly created app."""
    await apps.create_app('testapp', 'http://url', rest_client=keycloak_bootstrap)
    info = await apps.app_info('testapp', rest_client=keycloak_bootstrap)
    assert info['clientId'] == 'testapp'
    assert info['rootUrl'] == 'http://url'
    assert 'clientSecret' in info
    assert 'roles' in info
    assert set(info['roles']) == {'read', 'write'}
@pytest.mark.asyncio
async def test_create_app(keycloak_bootstrap):
    """create_app succeeds against a fresh realm (smoke test, no assertions)."""
    await apps.create_app('testapp', 'http://url', rest_client=keycloak_bootstrap)
@pytest.mark.asyncio
async def test_delete_app(keycloak_bootstrap):
    """An existing app can be deleted without error (smoke test)."""
    await apps.create_app('testapp', 'http://url', rest_client=keycloak_bootstrap)
    await apps.delete_app('testapp', rest_client=keycloak_bootstrap)
@pytest.mark.asyncio
async def test_get_app_role_mappings_empty(keycloak_bootstrap):
    """A new app has no role mappings; asking for an unknown role raises."""
    await apps.create_app('testapp', 'http://url', rest_client=keycloak_bootstrap)
    ret = await apps.get_app_role_mappings('testapp', rest_client=keycloak_bootstrap)
    assert ret == {}
    with pytest.raises(Exception):
        await apps.get_app_role_mappings('testapp', role='badrole', rest_client=keycloak_bootstrap)
@pytest.mark.asyncio
async def test_add_app_role_mapping(keycloak_bootstrap):
    """Mapping rejects unknown roles and unknown groups, then succeeds for a
    valid role/group pair and is visible via get_app_role_mappings."""
    await apps.create_app('testapp', 'http://url', rest_client=keycloak_bootstrap)
    with pytest.raises(Exception):
        # unknown role
        await apps.add_app_role_mapping('testapp', role='badrole', group='/badgroup', rest_client=keycloak_bootstrap)
    with pytest.raises(Exception):
        # valid role, unknown group
        await apps.add_app_role_mapping('testapp', role='read', group='/badgroup', rest_client=keycloak_bootstrap)
    await groups.create_group('/testgroup', rest_client=keycloak_bootstrap)
    await apps.add_app_role_mapping('testapp', role='read', group='/testgroup', rest_client=keycloak_bootstrap)
    ret = await apps.get_app_role_mappings('testapp', rest_client=keycloak_bootstrap)
    assert ret == {'read': ['/testgroup']}
@pytest.mark.asyncio
async def test_delete_app_role_mapping(keycloak_bootstrap):
    """Deleting a mapping rejects unknown role/group, and removing the one
    real mapping leaves the app with no mappings."""
    await apps.create_app('testapp', 'http://url', rest_client=keycloak_bootstrap)
    await groups.create_group('/testgroup', rest_client=keycloak_bootstrap)
    await apps.add_app_role_mapping('testapp', role='read', group='/testgroup', rest_client=keycloak_bootstrap)
    with pytest.raises(Exception):
        # unknown role
        await apps.delete_app_role_mapping('testapp', role='badrole', group='/badgroup', rest_client=keycloak_bootstrap)
    with pytest.raises(Exception):
        # valid role, unknown group
        await apps.delete_app_role_mapping('testapp', role='read', group='/badgroup', rest_client=keycloak_bootstrap)
    await apps.delete_app_role_mapping('testapp', role='read', group='/testgroup', rest_client=keycloak_bootstrap)
    ret = await apps.get_app_role_mappings('testapp', rest_client=keycloak_bootstrap)
    assert ret == {}
@pytest.mark.asyncio
async def test_get_public_token(keycloak_bootstrap):
    """A user in a group mapped to an app role gets a token whose scope is the
    app and whose roles reflect the mapping."""
    await apps.create_app('testapp', 'http://url', rest_client=keycloak_bootstrap)
    await groups.create_group('/testgroup', rest_client=keycloak_bootstrap)
    await apps.add_app_role_mapping('testapp', role='read', group='/testgroup', rest_client=keycloak_bootstrap)
    await users.create_user('testuser', 'test', 'user', 'test@user', rest_client=keycloak_bootstrap)
    await users.set_user_password('testuser', 'foo', rest_client=keycloak_bootstrap)
    await groups.add_user_group('/testgroup', 'testuser', rest_client=keycloak_bootstrap)
    # OpenID endpoint of the realm under test (from the test environment).
    url = f'{os.environ["KEYCLOAK_URL"]}/auth/realms/{os.environ["KEYCLOAK_REALM"]}'
    ret = apps.get_public_token(username='testuser', password='foo', scopes=['testapp'], openid_url=url)
    assert ret['scope'] == 'testapp'
    assert ret['roles'] == {'testapp': ['read']}
| 45.113402
| 120
| 0.752514
| 576
| 4,376
| 5.440972
| 0.114583
| 0.216975
| 0.172304
| 0.258456
| 0.803446
| 0.801213
| 0.749202
| 0.721123
| 0.68411
| 0.68411
| 0
| 0
| 0.113803
| 4,376
| 96
| 121
| 45.583333
| 0.808149
| 0
| 0
| 0.493151
| 0
| 0
| 0.151737
| 0.016225
| 0
| 0
| 0
| 0
| 0.178082
| 1
| 0
| false
| 0.027397
| 0.068493
| 0
| 0.068493
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d23257a074ee43c605abe7257a8e906ebe9abc3a
| 230
|
py
|
Python
|
mowgli_etl/model/kg_path.py
|
tetherless-world/mowgli
|
28c19eba41e03e053ae4addff56a313d926e18d7
|
[
"MIT"
] | 4
|
2021-01-15T15:36:23.000Z
|
2021-09-01T06:52:05.000Z
|
mowgli_etl/model/kg_path.py
|
tetherless-world/mowgli
|
28c19eba41e03e053ae4addff56a313d926e18d7
|
[
"MIT"
] | 63
|
2020-05-04T13:48:04.000Z
|
2020-06-06T02:32:58.000Z
|
mowgli_etl/model/kg_path.py
|
tetherless-world/mowgli-etl
|
28c19eba41e03e053ae4addff56a313d926e18d7
|
[
"MIT"
] | null | null | null |
from typing import NamedTuple, Tuple
class KgPath(NamedTuple):
    """An immutable path through a knowledge graph, stored as a flat sequence
    of alternating node and edge labels."""
    # Unique identifier for this path.
    id: str
    # A sequence of subject, predicate, object/subject, predicate, object/subject, ..., object
    path: Tuple[str, ...]
    # Identifiers of the source(s) this path came from — presumably dataset/provenance
    # ids; TODO confirm against callers.
    source_ids: Tuple[str, ...]
| 25.555556
| 94
| 0.673913
| 28
| 230
| 5.5
| 0.642857
| 0.207792
| 0.285714
| 0.376623
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.191304
| 230
| 8
| 95
| 28.75
| 0.827957
| 0.382609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.2
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
962a030b952ddd540b23e2e782042cd641732336
| 41,146
|
py
|
Python
|
test/tests.py
|
drsagitn/sejonggo
|
5ab690426eda49efe263d93db26e74556066f41b
|
[
"MIT"
] | 1
|
2021-04-20T23:00:09.000Z
|
2021-04-20T23:00:09.000Z
|
test/tests.py
|
drsagitn/sejonggo
|
5ab690426eda49efe263d93db26e74556066f41b
|
[
"MIT"
] | null | null | null |
test/tests.py
|
drsagitn/sejonggo
|
5ab690426eda49efe263d93db26e74556066f41b
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import sys
sys.path.append("..")
from conf import conf
conf['SIZE'] = 9 # Override settings for tests
conf['KOMI'] = 5.5 # Override settings for tests
import unittest
import numpy as np
from simulation_workers import init_simulation_workers, destroy_simulation_workers
import os
from play import (
color_board, _get_points, capture_group, make_play, legal_moves,
index2coord, game_init, get_liberties, tree_depth
)
from self_play import (
play_game, simulate,
)
from symmetry import (
_id,
left_diagonal, reverse_left_diagonal,
right_diagonal, reverse_right_diagonal,
vertical_axis, reverse_vertical_axis,
horizontal_axis, reverse_horizontal_axis,
rotation_90, reverse_rotation_90,
rotation_180, reverse_rotation_180,
rotation_270, reverse_rotation_270,
)
import itertools
from sgfsave import save_game_sgf
import pdb
class DummyModel(object):
    """Deterministic stand-in for the policy/value network.

    predict_on_batch returns, for every sample, the same policy row
    (size*size+1 descending scores, normalised to sum to 1, so move 0 is
    always the most likely) and a constant value of 1.
    """
    name = "dummy_model"

    def predict(self, X):
        # Single-sample convenience wrapper around the batch prediction.
        policies, values = self.predict_on_batch(X)
        return policies[0], values[0]

    def predict_on_batch(self, X):
        size = conf['SIZE']
        n_samples = X.shape[0]
        # One descending row size*size+1, size*size, ..., 1 repeated per sample.
        row = np.arange(size * size + 1, 0, -1, dtype=np.float32)
        policy = np.tile(row, (n_samples, 1))
        policy /= policy.sum(axis=1, keepdims=True)
        value = np.ones((n_samples, 1), dtype=np.float32)
        return policy, value
class TestGoMethods(unittest.TestCase):
    """Tests for board colouring (territory flooding), scoring, and
    capture-group detection helpers from play.py.

    Board literals use 1 for black stones, -1 for white, 0 for empty;
    coordinates in capture_group calls are (x, y).
    """

    def assertEqualList(self, arr1, arr2):
        """Assert two numpy arrays are element-wise equal via list conversion."""
        self.assertEqual(arr1.tolist(), arr2.tolist())

    def test_coloring_player_1(self):
        """color_board(board, 1) fills black-enclosed empty points with 1."""
        board = np.array(
            [[1, 1, 1],
             [1, 0, 1],
             [1, 1, 1]])
        target = np.array(
            [[1, 1, 1],
             [1, 1, 1],
             [1, 1, 1]])
        self.assertEqualList(color_board(board, 1), target)
        board = np.array(
            [[1, 1, 1, -1, -1, -1],
             [1, 0, 1, -1, 0, -1],
             [1, 1, 1, -1, -1, -1]])
        # The white-enclosed point stays 0 when colouring for player 1.
        target = np.array(
            [[1, 1, 1, -1, -1, -1],
             [1, 1, 1, -1, 0, -1],
             [1, 1, 1, -1, -1, -1]])
        self.assertEqualList(color_board(board, 1), target)

    def test_player_1_big(self):
        """Summing the two per-player colourings on a 9x9 position gives the
        expected combined territory/stone map (2/-2 mark stones)."""
        board = np.array([
            [0, 0, 0, 1, 0, -1, 0, 0, 0,],
            [0, 0, 0, 1, 0, -1, 0, 0, 0,],
            [0, 0, 0, 1, 0, -1, 0, 0, 0,],
            [0, 0, 0, 1, -1, 0, 0, -1, 0,],
            [1, 1, 1, -1, 0, -1, -1, 0, 0,],
            [0, 0, 0, 1, -1, 0, 0, -1, -1,],
            [0, 0, 0, 1, 0, -1, 0, 0, 0,],
            [0, 0, 0, 1, 0, -1, 0, 1, 0,],
            [0, 0, 0, 0, 0, -1, 0, 0, 0,],
        ])
        target = np.array([
            [1, 1, 1, 2, 0, -2, -1, -1, -1,],
            [1, 1, 1, 2, 0, -2, -1, -1, -1,],
            [1, 1, 1, 2, 0, -2, -1, -1, -1,],
            [1, 1, 1, 2, -2, -1, -1, -2, -1,],
            [2, 2, 2, -2, -1, -2, -2, -1, -1,],
            [0, 0, 0, 2, -2, 0, 0, -2, -2,],
            [0, 0, 0, 2, 0, -2, 0, 0, 0,],
            [0, 0, 0, 2, 0, -2, 0, 2, 0,],
            [0, 0, 0, 0, 0, -2, 0, 0, 0,],
        ])
        colored1 = color_board(board, 1)
        colored2 = color_board(board, -1)
        total = colored1 + colored2
        self.assertEqualList(total, target)

    def test_coloring_player_2(self):
        """color_board(board, -1) fills only white-enclosed empty points."""
        board = np.array(
            [[1, 1, 1],
             [1, 0, 1],
             [1, 1, 1]])
        target = np.array(
            [[1, 1, 1],
             [1, 0, 1],
             [1, 1, 1]])
        self.assertEqualList(color_board(board, -1), target)
        board = np.array(
            [[1, 1, 1, -1, -1, -1],
             [1, 0, 1, -1, 0, -1],
             [1, 1, 1, -1, -1, -1]])
        target = np.array(
            [[1, 1, 1, -1, -1, -1],
             [1, 0, 1, -1, -1, -1],
             [1, 1, 1, -1, -1, -1]])
        self.assertEqualList(color_board(board, -1), target)

    def test_get_winner(self):
        """_get_points tallies neutral (0), territory (±1) and stones (±2)."""
        board = np.array([
            [0, 0, 0, 1, 0, -1, 0, 0, 0,],
            [0, 0, 0, 1, 0, -1, 0, 0, 0,],
            [0, 0, 0, 1, 0, -1, 0, 0, 0,],
            [0, 0, 0, 1, -1, 0, 0, -1, 0,],
            [1, 1, 1, -1, 0, -1, -1, 0, 0,],
            [0, 0, 0, 1, -1, 0, 0, -1, -1,],
            [0, 0, 0, 1, 0, -1, 0, 0, 0,],
            [0, 0, 0, 1, 0, -1, 0, 1, 0,],
            [0, 0, 0, 0, 0, -1, 0, 0, 0,],
        ])
        self.assertEqual(_get_points(board), {0: 29, 1: 12, 2: 11, -1: 15, -2: 14})

    def test_taking_stones(self):
        """A single surrounded stone forms its own capture group."""
        board = np.array(
            [[0, 1, 0],
             [1,-1, 1],
             [0, 1, 0]])
        target_group = [(1, 1)]
        group = capture_group(1, 1, board)
        self.assertEqual(group, target_group)

    def test_taking_group_stones(self):
        """A connected surrounded group is returned from either member,
        starting at the queried stone."""
        board = np.array(
            [[0, 1, 0],
             [1,-1, 1],
             [1,-1, 1],
             [0, 1, 0]])
        target_group = [(1, 1), (1, 2)]
        group = capture_group(1, 1, board)
        self.assertEqual(group, target_group)
        target_group = [(1, 2), (1, 1)]
        group = capture_group(1, 2, board)
        self.assertEqual(group, target_group)

    def test_taking_group_stones_sides(self):
        """Groups pressed against the board edge are detected as captured."""
        board = np.array(
            [[-1, 1, 0],
             [ 1, 0, 0],
             [ 0, 0, 0]])
        target_group = [(0, 0)]
        group = capture_group(0, 0, board)
        self.assertEqual(group, target_group)
        board = np.array(
            [[-1,-1, 1],
             [ 1, 1, 0],
             [ 0, 0, 0]])
        target_group = [(0, 0), (1, 0)]
        group = capture_group(0, 0, board)
        self.assertEqual(group, target_group)
        target_group = [(1, 0), (0, 0)]
        group = capture_group(1, 0, board)
        self.assertEqual(group, target_group)

    def test_taking_group_sucide(self):
        # NOTE(review): this body is identical to test_taking_group_stones_sides
        # and does not actually exercise a suicide position — verify intent.
        board = np.array(
            [[-1, 1, 0],
             [ 1, 0, 0],
             [ 0, 0, 0]])
        target_group = [(0, 0)]
        group = capture_group(0, 0, board)
        self.assertEqual(group, target_group)
        board = np.array(
            [[-1,-1, 1],
             [ 1, 1, 0],
             [ 0, 0, 0]])
        target_group = [(0, 0), (1, 0)]
        group = capture_group(0, 0, board)
        self.assertEqual(group, target_group)
        target_group = [(1, 0), (0, 0)]
        group = capture_group(1, 0, board)
        self.assertEqual(group, target_group)

    def test_circle_group(self):
        """A ring-shaped group (with an eye that is filled by the opponent)
        is returned in full from any of its stones."""
        board = np.array(
            [[ 0, 1, 1, 1, 0],
             [ 1,-1,-1,-1, 1],
             [ 1,-1, 1,-1, 1],
             [ 1,-1,-1,-1, 1],
             [ 0, 1, 1, 1, 0]])
        target_group = [(1, 1), (2, 1), (3, 1), (3, 2), (3, 3), (2, 3), (1, 3), (1, 2)]
        self.assertEqual(len(target_group), 8)
        for x, y in target_group:
            group = capture_group(x, y, board)
            self.assertEqual(sorted(group), sorted(target_group))
class TestBoardMethods(unittest.TestCase):
    """Tests for make_play / legal_moves / get_liberties on the stacked
    board representation (board[0][y][x][channel]).

    Channel 0 holds the current player's stones, channel 1 the opponent's,
    channels 2/3 the previous position — inferred from the assertions below;
    confirm against play.py. In the ASCII diagrams ○ is black and ● is white.
    NOTE(review): the assertions imply legal_moves returns a mask where True
    marks an ILLEGAL move — confirm in play.py.
    """

    def test_get_liberties(self):
        """Liberty counts for stones/groups in two small positions."""
        board, player = game_init()
        make_play(0, 0, board)  # black
        make_play(1, 0, board)  # white
        make_play(8, 9, board)  # black random
        make_play(2, 1, board)  # white
        make_play(8, 8, board)  # black random pos
        make_play(3, 0, board)  # white
        make_play(2, 0, board)  # black
        # ○ ● . ● . .
        # . . ● . . .
        # . . . . . .
        tmp = get_liberties(2, 0, board, 1)
        self.assertEqual(len(tmp),0)
        tmp = get_liberties(2, 0, board, -1)
        self.assertEqual(len(tmp), 4)
        board, player = game_init()
        make_play(2, 1, board)  # white
        make_play(2, 0, board)  # black
        make_play(3, 1, board)  # white
        make_play(1, 1, board)  # black
        make_play(4, 1, board, -1)  # white
        make_play(2, 2, board, -1)  # white
        # . . ○ . . .
        # . ○ ● ● ○ .
        # . . ○ . . .
        # . . . . . .
        tmp = get_liberties(2, 1, board, 1)
        self.assertEqual(len(tmp), 2)
        tmp = get_liberties(3, 1, board, 1)
        self.assertEqual(len(tmp), 2)

    def test_self_sucide(self):
        """A self-capturing play removes the played stone, not the opponent's."""
        board, player = game_init()
        make_play(0, 0, board)  # black
        make_play(1, 0, board)  # white
        make_play(8, 9, board)  # black random
        make_play(2, 1, board)  # white
        make_play(8, 8, board)  # black random pos
        make_play(3, 0, board)  # white
        # ○ ● . ● . .
        # . . ● . . .
        # . . . . . .
        make_play(2, 0, board)  # black suicides
        self.assertEqual(board[0][0][1][0], 1)  # white stone
        self.assertEqual(board[0][0][1][1], 0)  # was not taken
        self.assertEqual(board[0][0][2][0], 0)  # black stone
        self.assertEqual(board[0][0][2][1], 0)  # was taken
    def test_legal_moves_not_suicide(self):
        """Playing into the last liberty is legal when it captures."""
        board, player = game_init()
        make_play(0, 0, board)  # black
        make_play(1, 0, board)  # white
        make_play(1, 1, board)  # black
        make_play(2, 1, board)  # white
        make_play(8, 8, board)  # black random pos
        make_play(3, 0, board)  # white
        # ○ ● . ● . .
        # . ○ ● . . .
        # . . . . . .
        mask = legal_moves(board)
        self.assertEqual(mask[2], False)  # not a suicide when capture other stones

    def test_legal_moves_suicide(self):
        board, player = game_init()
        make_play(0, 1, board)  # black
        make_play(1, 0, board)  # white
        make_play(1, 1, board)  # black
        make_play(2, 1, board)  # white
        make_play(8, 8, board)  # black random pos
        make_play(3, 0, board)  # white
        # . ● . ● . .
        # ○ ○ ● . . .
        # . . . . . .
        mask = legal_moves(board)
        self.assertEqual(mask[2], True)  # suicide move should be illegal

    def test_legal_moves_suicide2(self):
        board, player = game_init()
        make_play(3, 0, board)  # black = 1, col, row
        make_play(1, 0, board)  # white
        make_play(1, 1, board)  # black
        make_play(2, 1, board)  # white
        make_play(3, 1, board, -1)  # white
        make_play(4, 0, board, -1)  # white
        # . ● . ○ ● .
        # . ○ ● ● . .
        # . . . . . .
        mask = legal_moves(board)
        self.assertEqual(mask[2], True)  # suicide move should be illegal

    def test_legal_moves_suicide3(self):
        board, player = game_init()
        make_play(1, 2, board)  # black
        make_play(2, 0, board)  # white
        make_play(3, 1, board)  # black
        make_play(3, 0, board)  # white
        make_play(1, 1, board, -1)  # white
        make_play(4, 1, board, -1)  # white
        make_play(2, 2, board, -1)  # white
        make_play(3, 2, board, -1)  # white
        # . . ● ● . .
        # . ● . ○ ● .
        # . ○ ● ● . .
        # . . . . . .
        mask = legal_moves(board)
        self.assertEqual(mask[10], True)  # suicide move should be illegal

    def test_legal_moves_ko(self):
        """Immediate recapture of a single stone (ko) is flagged illegal."""
        board, player = game_init()
        make_play(0, 0, board)  # black
        make_play(1, 0, board)  # white
        make_play(1, 1, board)  # black
        make_play(2, 1, board)  # white
        make_play(8, 8, board)  # black random pos
        make_play(3, 0, board)  # white
        # ○ ● . ● . .
        # . ○ ● . . .
        # . . . . . .
        make_play(2, 0, board)  # black captures first
        # ○ . ○ ● . .
        # . ○ ● . . .
        # . . . . . .
        mask = legal_moves(board)
        self.assertEqual(board[0][0][1][0], 0)  # white stone
        self.assertEqual(board[0][0][1][1], 0)  # was taken
        self.assertEqual(board[0][0][1][2], 1)  # white stone was here
        self.assertEqual(board[0][0][1][3], 0)  # black stone was not here
        self.assertEqual(mask[1], True)

    def test_legal_moves_not_ko(self):
        """Recapturing a two-stone group is not ko; both points stay legal."""
        board, player = game_init()
        make_play(0, 0, board)  # black
        make_play(1, 0, board)  # white
        make_play(1, 1, board)  # black
        make_play(2, 0, board)  # white
        make_play(2, 1, board)  # black
        make_play(8, 8, board)  # white random pos
        # ○ ● ● . . .
        # . ○ ○ . . .
        # . . . . . .
        make_play(3, 0, board)  # black captures first
        # ○ . . ○ . .
        # . ○ ○ . . .
        # . . . . . .
        mask = legal_moves(board)
        self.assertEqual(board[0][0][1][0], 0)  # white stone 1
        self.assertEqual(board[0][0][1][1], 0)  # was taken
        self.assertEqual(board[0][0][2][0], 0)  # white stone 2
        self.assertEqual(board[0][0][2][1], 0)  # was taken
        self.assertEqual(board[0][0][1][2], 1)  # white stone 1 was here
        self.assertEqual(board[0][0][1][3], 0)  # black stone was not here
        self.assertEqual(board[0][0][2][2], 1)  # white stone 2 was here
        self.assertEqual(board[0][0][2][3], 0)  # black stone was not here
        self.assertEqual(mask[1], False)
        self.assertEqual(mask[2], False)

    def test_full_board_capture(self):
        """Filling nearly the whole board, then capturing the huge group."""
        size = conf['SIZE']
        board, player = game_init()
        for i in range(size*size - 2):
            x, y = index2coord(i)
            make_play(x, y, board)  # black
            make_play(0, size, board)  # white pass
        # ○ ○ ○ ○ ○ ○
        # ○ ○ ○ ○ ○ ○
        # ○ ○ ○ ○ ○ ○
        # ○ ○ ○ ○ . .
        make_play(0, size, board)  # black pass
        make_play(size -1, size - 1, board)  # white corner
        # ○ ○ ○ ○ ○ ○
        # ○ ○ ○ ○ ○ ○
        # ○ ○ ○ ○ ○ ○
        # ○ ○ ○ ○ . ●
        for i in range(size*size - 2):
            x, y = index2coord(i)
            self.assertEqual(board[0][y][x][0], 1)  # black stone i
            self.assertEqual(board[0][y][x][1], 0)  # black stone i
        self.assertEqual(board[0][size - 1][size - 1][0], 0)  # white stone
        self.assertEqual(board[0][size - 1][size - 1][1], 1)  # white stone
        self.assertEqual(board[0][size - 1][size - 2][0], 0)  # empty
        self.assertEqual(board[0][size - 1][size - 2][1], 0)  # empty
        make_play(size - 2, size - 1, board)  # black
        # ○ ○ ○ ○ ○ ○
        # ○ ○ ○ ○ ○ ○
        # ○ ○ ○ ○ ○ ○
        # ○ ○ ○ ○ ○ .
        for i in range(size*size - 1):
            x, y = index2coord(i)
            self.assertEqual(board[0][y][x][0], 0)  # black stone i
            self.assertEqual(board[0][y][x][1], 1)  # black stone i (it's white's turn)
        self.assertEqual(board[0][size - 1][size - 1][0], 0)  # empty
        self.assertEqual(board[0][size - 1][size - 1][1], 0)  # empty
        make_play(size - 1, size - 1, board)  # white
        # . . . . . .
        # . . . . . .
        # . . . . . .
        # . . . . . ●
        for i in range(size*size - 1):
            x, y = index2coord(i)
            self.assertEqual(board[0][y][x][0], 0)  # empty
            self.assertEqual(board[0][y][x][1], 0)  # empty
        self.assertEqual(board[0][size - 1][size - 1][0], 0)  # white
        self.assertEqual(board[0][size - 1][size - 1][1], 1)  # white

    def test_bug(self):
        """Regression test: white capturing several embedded black groups at once."""
        # ● ● ● ● ● ● ● ● ●
        # ● ● ● ● ● ● ● ● ●
        # ● ● ● ● ● ● ● ● ●
        # ● ● ● ● ● ● ● ● ●
        # ● ● ● ● ● ● ● ● ●
        # ● ● ● ● ● ● ● ● ●
        # ● ● ● ● ● ○ ○ ● ●
        # ● ● ● ● ● ● . ● ●
        # ● ● ● ● ● ● ○ ○ ○
        size = conf['SIZE']
        board, player = game_init()
        for i in range(size*size):
            x, y = index2coord(i)
            if (x, y) in [(5, 6), (6, 6), (6, 8), (7, 8), (8, 8)]:
                make_play(x, y, board)  # black
                make_play(0, size, board)  # white pass
            elif (x, y) in [(6, 7)]:
                make_play(0, size, board)  # black pass
                make_play(0, size, board)  # white pass
            else:
                make_play(0, size, board)  # black pass
                make_play(x, y, board)  # white
        make_play(0, size, board)  # black pass
        make_play(6, 7, board)  # white
        # ● ● ● ● ● ● ● ● ●
        # ● ● ● ● ● ● ● ● ●
        # ● ● ● ● ● ● ● ● ●
        # ● ● ● ● ● ● ● ● ●
        # ● ● ● ● ● ● ● ● ●
        # ● ● ● ● ● ● ● ● ●
        # ● ● ● ● ● . . ● ●
        # ● ● ● ● ● ● ● ● ●
        # ● ● ● ● ● ● . . .
        for i in range(size*size - 1):
            x, y = index2coord(i)
            if (x, y) in [(5, 6), (6, 6), (6, 8), (7, 8), (8, 8)]:
                self.assertEqual(board[0][y][x][0], 0)  # empty
                self.assertEqual(board[0][y][x][1], 0)  # empty
            else:
                self.assertEqual(board[0][y][x][0], 0)  # white
                self.assertEqual(board[0][y][x][1], 1)  # white
class TestSymmetrydTestCase(unittest.TestCase):
    """Checks each board symmetry transform against hand-computed stone
    positions, and that the matching reverse_* transform maps the policy
    vector onto the same squares.

    The seven per-symmetry tests were near-identical copies differing only in
    the transform pair and the expected coordinates; the shared logic now
    lives in _assert_symmetry so a fix applies to all of them.
    """

    def setUp(self):
        # Place 4 black stones in an L shape at (1,1),(1,2),(1,3),(2,3) and
        # mark the same squares in the policy; the trailing "pass" slot is
        # set to -1 as a sentinel that must survive every transform.
        size = conf['SIZE']
        board, player = game_init()
        policy = np.zeros((1, size * size + 1), dtype=np.float32)
        self.board = board
        self.size = size
        self.policy = policy
        board = self.board
        for x, y in [(1, 1), (1, 2), (1, 3), (2, 3)]:
            make_play(x, y, board)  # black
            make_play(0, size, board)  # white pass
            policy[0, x + y * size] = 1
        policy[0, size * size] = -1  # Pass move

    def _assert_symmetry(self, board_transform, policy_reverse, should_be_ones):
        """Apply board_transform to the board and policy_reverse to the policy,
        then assert that the stones / policy mass land exactly on
        should_be_ones and the pass slot is untouched."""
        size = self.size
        board = board_transform(self.board)
        for x, y in itertools.product(range(size), repeat=2):
            if (x, y) in should_be_ones:
                self.assertEqual(board[0,y,x,0], 1)
                self.assertEqual(board[0,y,x,1], 0)
            else:
                self.assertEqual(board[0,y,x,0], 0)
                self.assertEqual(board[0,y,x,1], 0)
        policy = policy_reverse(self.policy)
        for x, y in itertools.product(range(size), repeat=2):
            if (x, y) in should_be_ones:
                self.assertEqual(policy[0, x + size * y], 1)
            else:
                self.assertEqual(policy[0, x + size * y], 0)
        self.assertEqual(policy[0, size * size], -1)

    def test_id(self):
        """The identity transform leaves both board and policy unchanged."""
        board = self.board
        size = self.size
        old_board = np.copy(board)
        board = _id(board)
        for i, j in zip(old_board.reshape(-1), board.reshape(-1)):
            self.assertEqual(i, j)
        policy = np.arange(size*size + 1)
        old_policy = np.copy(policy)
        policy = _id(policy)
        for i, j in zip(old_policy.reshape(-1), policy.reshape(-1)):
            self.assertEqual(i, j)

    def test_left_diagonal(self):
        # Transposed coordinates
        self._assert_symmetry(left_diagonal, reverse_left_diagonal,
                              [(1, 1), (2, 1), (3, 1), (3, 2)])

    def test_vertical_axis(self):
        # Mirrored across the vertical axis
        self._assert_symmetry(vertical_axis, reverse_vertical_axis,
                              [(7, 1), (7, 2), (7, 3), (6, 3)])

    def test_right_diagonal(self):
        # Mirrored across the anti-diagonal
        self._assert_symmetry(right_diagonal, reverse_right_diagonal,
                              [(7, 7), (6, 7), (5, 7), (5, 6)])

    def test_horizontal_axis(self):
        # Mirrored across the horizontal axis
        self._assert_symmetry(horizontal_axis, reverse_horizontal_axis,
                              [(1, 7), (1, 6), (1, 5), (2, 5)])

    def test_rotation_90(self):
        # Rotation 90deg anticlockwise
        self._assert_symmetry(rotation_90, reverse_rotation_90,
                              [(1, 7), (2, 7), (3, 7), (3, 6)])

    def test_rotation_180(self):
        # Rotation 180deg anticlockwise
        self._assert_symmetry(rotation_180, reverse_rotation_180,
                              [(7, 7), (7, 6), (7, 5), (6, 5)])

    def test_rotation_270(self):
        # Rotation 270deg anticlockwise
        self._assert_symmetry(rotation_270, reverse_rotation_270,
                              [(7, 1), (6, 1), (5, 1), (5, 2)])
class MCTSTestCase(unittest.TestCase):
def setUp(self):
# Remove the symmetries for reproductibility
import symmetry
symmetry.SYMMETRIES = symmetry.SYMMETRIES[0:1]
size = conf['SIZE']
tree = {
'count': 0,
'mean_value': 0,
'value': 0,
'parent': None,
'subtree': {
0:{
'count': 0,
'p': 1,
'value': 0,
'mean_value': 0,
'subtree': {}
},
1: {
'count': 0,
'p': 0,
'mean_value': 0,
'value': 0,
'subtree': {}
}
}
}
tree['subtree'][0]['parent'] = tree
tree['subtree'][1]['parent'] = tree
board, player = game_init()
model = DummyModel()
self.model = model
self.board = board
self.tree = tree
init_simulation_workers()
def test_tree_depth(self):
d = tree_depth(self.tree)
assert d == 2
def tearDown(self):
destroy_simulation_workers()
def test_leaf(self):
tree = self.tree
board = self.board
model = self.model
simulate(tree, board, model, mcts_batch_size=2, original_player=1)
self.assertEqual(tree['subtree'][0]['count'], 1)
self.assertEqual(tree['subtree'][1]['count'], 1)
self.assertEqual(tree['subtree'][0]['value'], -1)
self.assertEqual(tree['subtree'][1]['value'], -1)
self.assertEqual(tree['count'], 2)
self.assertEqual(tree['value'], -2)
self.assertEqual(tree['mean_value'], -1)
def test_model_evaluation(self):
tree = self.tree
board = self.board
size = conf['SIZE']
test_board1, player = game_init()
make_play(0, 0, test_board1)
test_board2, player = game_init()
make_play(1, 0, test_board2)
class DummyModel(object):
def predict_on_batch(_, X):
size = conf['SIZE']
board1 = X[0].reshape(1, size, size, 17)
board2 = X[1].reshape(1, size, size, 17)
self.assertTrue(np.array_equal(board1, test_board1))
self.assertTrue(np.array_equal(board2, test_board2))
batch_size = X.shape[0]
policy = np.zeros((batch_size, size * size + 1), dtype=np.float32)
policy[:,0] = 1
value = np.zeros((batch_size, 1), dtype=np.float32)
value[:] = 1
return policy, value
model = DummyModel()
simulate(tree, board, model, mcts_batch_size=2, original_player=1)
def test_model_evaluation_nested(self):
tree = {
'count': 0,
'mean_value': 0,
'value': 0,
'parent': None,
'subtree':{
0:{
'count': 0,
'p': 1,
'value': 0,
'mean_value': 0,
'subtree': {
1: { # <----- This will be checked first
'count': 0,
'p': 1,
'mean_value': 0,
'value': 0,
'subtree': {},
},
2: { # <----- This will be checked second
'count': 0,
'p': 0,
'mean_value': 0,
'value': 0,
'subtree': {},
}
}
},
1: {
'count': 0,
'p': 0,
'mean_value': 0,
'value': 0,
'subtree': {},
}
}
}
tree['subtree'][0]['parent'] = tree
tree['subtree'][0]['subtree'][1]['parent'] = tree['subtree'][0]
tree['subtree'][0]['subtree'][2]['parent'] = tree['subtree'][0]
tree['subtree'][1]['parent'] = tree
d = tree_depth(tree)
assert d == 3
board = self.board
size = conf['SIZE']
test_board1, player = game_init()
make_play(0, 0, test_board1)
make_play(1, 0, test_board1)
test_board2, player = game_init()
make_play(0, 0, test_board2)
make_play(2, 0, test_board2)
class DummyModel(object):
def predict_on_batch(_, X):
size = conf['SIZE']
board1 = X[0].reshape(1, size, size, 17)
board2 = X[1].reshape(1, size, size, 17)
self.assertTrue(np.array_equal(board1, test_board1))
self.assertTrue(np.array_equal(board2, test_board2))
batch_size = X.shape[0]
policy = np.zeros((batch_size, size * size + 1), dtype=np.float32)
policy[:,0] = 1
value = np.zeros((batch_size, 1), dtype=np.float32)
value[:] = 1
return policy, value
model = DummyModel()
# Remove the symmetries for reproductibility
simulate(tree, board, model, mcts_batch_size=2, original_player =1)
def test_model_evaluation_other_nested(self):
tree = {
'count': 0,
'mean_value': 0,
'value': 0,
'parent': None,
'subtree':{
0:{
'count': 0,
'p': 1,
'value': 0,
'mean_value': 0,
'subtree': {},
},
1: {
'count': 0,
'p': 0,
'mean_value': 0,
'value': 0,
'subtree': {
0: {
'count': 0,
'p': 0,
'mean_value': 0,
'value': 0,
'subtree': {},
},
2: {
'count': 0,
'p': 1,
'mean_value': 0,
'value': 0,
'subtree': {},
}
}
}
}
}
tree['subtree'][0]['parent'] = tree
tree['subtree'][1]['parent'] = tree
tree['subtree'][1]['subtree'][0]['parent'] = tree['subtree'][1]
tree['subtree'][1]['subtree'][2]['parent'] = tree['subtree'][1]
d = tree_depth(tree)
assert d == 3
board = self.board
size = conf['SIZE']
test_board1, player = game_init()
make_play(0, 0, test_board1)
test_board2, player = game_init()
make_play(1, 0, test_board2)
make_play(2, 0, test_board2)
class DummyModel(object):
def predict_on_batch(_, X):
size = conf['SIZE']
board1 = X[0].reshape(1, size, size, 17)
board2 = X[1].reshape(1, size, size, 17)
self.assertTrue(np.array_equal(board1, test_board1))
self.assertTrue(np.array_equal(board2, test_board2))
batch_size = X.shape[0]
policy = np.zeros((batch_size, size * size + 1), dtype=np.float32)
policy[:,0] = 1
value = np.zeros((batch_size, 1), dtype=np.float32)
value[:] = 1
return policy, value
model = DummyModel()
simulate(tree, board, model, mcts_batch_size=2, original_player=1)
def test_small_batch_size(self):
tree = self.tree
model = self.model
board = self.board
simulate(tree, board, model, mcts_batch_size=1, original_player=1)
self.assertEqual(tree['subtree'][0]['count'], 1)
self.assertEqual(tree['subtree'][0]['value'], -1)
self.assertNotEqual(tree['subtree'][0]['subtree'], {})
self.assertEqual(tree['subtree'][1]['count'], 0)
self.assertEqual(tree['subtree'][1]['value'], 0)
self.assertEqual(tree['subtree'][1]['subtree'], {})
def test_nested_selected(self):
model = self.model
board = self.board
tree = {
'count': 0,
'mean_value': 0,
'value': 0,
'parent': None,
'subtree':{
0:{
'count': 0,
'p': 1,
'value': 0,
'mean_value': 0,
'subtree': {
1: {
'count': 0,
'p': 0,
'mean_value': 0,
'value': 0,
'subtree': {},
},
2: {
'count': 0,
'p': 1,
'mean_value': 0,
'value': 0,
'subtree': {},
}
}
},
1: {
'count': 0,
'p': 0,
'mean_value': 0,
'value': 0,
'subtree': {},
}
}
}
tree['subtree'][0]['parent'] = tree
tree['subtree'][0]['subtree'][1]['parent'] = tree['subtree'][0]
tree['subtree'][0]['subtree'][2]['parent'] = tree['subtree'][0]
tree['subtree'][1]['parent'] = tree
d = tree_depth(tree)
assert d == 3
simulate(tree, board, model, mcts_batch_size=2, original_player=1)
self.assertEqual(tree['subtree'][0]['count'], 2)
self.assertEqual(tree['subtree'][0]['subtree'][1]['count'], 1)
self.assertEqual(tree['subtree'][0]['subtree'][2]['count'], 1)
self.assertEqual(tree['subtree'][1]['count'], 0)
self.assertEqual(tree['subtree'][0]['value'], 2)
self.assertEqual(tree['subtree'][0]['mean_value'], 1)
self.assertEqual(tree['subtree'][1]['value'], 0)
def test_nested_other_leaves(self):
model = self.model
board = self.board
tree = {
'count': 0,
'mean_value': 0,
'value': 0,
'parent': None,
'subtree': {
0:{
'count': 0,
'p': .75,
'value': 0,
'mean_value': 0,
'subtree': {}
},
1: {
'count': 0,
'p': .25,
'mean_value': 0,
'value': 0,
'subtree': {
0: {
'count': 0,
'p': 1,
'mean_value': 0,
'value': 0,
'subtree': {},
},
2: {
'count': 0,
'p': 0,
'mean_value': 0,
'value': 0,
'subtree': {},
}
}
},
2:{
'count': 0,
'p': 0,
'value': 0,
'mean_value': 0,
'subtree': {}
},
}
}
tree['subtree'][0]['parent'] = tree
tree['subtree'][1]['parent'] = tree
tree['subtree'][1]['subtree'][0]['parent'] = tree['subtree'][1]
tree['subtree'][1]['subtree'][2]['parent'] = tree['subtree'][1]
d = tree_depth(tree)
assert d == 3
simulate(tree, board, model, mcts_batch_size=2, original_player=1)
self.assertEqual(tree['subtree'][0]['count'], 1)
self.assertEqual(tree['subtree'][0]['value'], -1)
self.assertEqual(tree['subtree'][1]['value'], 1)
self.assertEqual(tree['subtree'][1]['count'], 1)
self.assertEqual(tree['subtree'][1]['subtree'][0]['count'], 1)
self.assertEqual(tree['subtree'][1]['subtree'][0]['value'], 1)
self.assertEqual(tree['subtree'][1]['subtree'][2]['count'], 0)
self.assertEqual(tree['count'], 2)
self.assertEqual(tree['mean_value'], 0)
self.assertEqual(tree['subtree'][2]['count'], 0)
self.assertEqual(tree['subtree'][2]['subtree'], {})
d = tree_depth(tree)
assert d == 4
class PlayTestCase(unittest.TestCase):
def setUp(self):
# Remove the symmetries for reproductibility
import symmetry
symmetry.SYMMETRIES = symmetry.SYMMETRIES[0:1]
from random import seed
seed(0)
init_simulation_workers()
def tearDown(self):
destroy_simulation_workers()
# def test_play(self):
# model = DummyModel()
# mcts_simulations = 8 # mcts batch size is 8 and we need at least one batch
# game_data = play_game(model, model, mcts_simulations, conf['STOP_EXPLORATION'], self_play=True, num_moves=2)
# winner = game_data['winner']
#
# test_board1, player = game_init()
#
# board = game_data['moves'][0]['board']
# self.assertTrue(np.array_equal(board, test_board1)) # First board is empty
#
# self.assertEqual(winner, 0) # White should win with 5.5 komi after 2 moves
#
# for move, move_data in enumerate(game_data['moves'][::2]): # Black player lost
# value_target = 1 if winner == move_data['player'] else -1
#
# self.assertEqual(move_data['player'], 1)
# self.assertEqual(value_target, -1)
#
# for move, move_data in enumerate(game_data['moves'][1::2]): # White player won
# value_target = 1 if winner == move_data['player'] else -1
#
# self.assertEqual(move_data['player'], 0)
# self.assertEqual(value_target, 1)
def test_new_tree_called_once_self_play(self):
import self_play
fn = self_play.new_tree
self.count = 0
def monkey_patch_new_tree(*args, **kwargs):
self.count += 1
return fn(*args, **kwargs)
self_play.new_tree = monkey_patch_new_tree
model = DummyModel()
mcts_simulations = 8 # We want some mcts exploration
play_game(model, model, mcts_simulations, conf['STOP_EXPLORATION'], self_play=True, num_moves=5)
self.assertEqual(self.count, 1) # Only one tree was created
def test_new_tree_called_twice_evaluation(self):
import self_play
fn = self_play.new_tree
self.count = 0
def monkey_patch_new_tree(*args, **kwargs):
self.count += 1
return fn(*args, **kwargs)
self_play.new_tree = monkey_patch_new_tree
model = DummyModel()
mcts_simulations = 32 # We want some mcts exploration
play_game(model, model, mcts_simulations, stop_exploration=0, self_play=False, num_moves=2)
# This works because we deactivate exploration and dirichlet noise in order to have
# deterministic play
self.assertEqual(self.count, 2) # Only one 2 trees were created
class SGFTestCase(unittest.TestCase):
def test_save_sgf(self):
model = DummyModel()
init_simulation_workers()
mcts_simulations = 8 # mcts batch size is 8 and we need at least one batch
game_data = play_game(model, model, mcts_simulations, conf['STOP_EXPLORATION'], self_play=True, num_moves=10)
save_game_sgf("test_model", 0, game_data)
destroy_simulation_workers()
os.remove("games/test_model/game_000.sgf")
os.removedirs("games/test_model")
if __name__ == '__main__':
unittest.main()
| 35.686036
| 118
| 0.454285
| 5,148
| 41,146
| 3.574981
| 0.052448
| 0.019235
| 0.024125
| 0.030863
| 0.815747
| 0.789176
| 0.763693
| 0.740057
| 0.71278
| 0.68333
| 0
| 0.062196
| 0.395105
| 41,146
| 1,152
| 119
| 35.717014
| 0.665917
| 0.092986
| 0
| 0.733042
| 0
| 0
| 0.043037
| 0.000782
| 0
| 0
| 0
| 0
| 0.175055
| 1
| 0.055799
| false
| 0
| 0.0186
| 0
| 0.094092
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
96c0bbead66260408469d53aec6e636559e98e46
| 13,997
|
py
|
Python
|
forest/forestdata.py
|
ThePyProgrammer/forest-pypi
|
b90c0d11a9d78d69593388e37d205a787bd66353
|
[
"MIT"
] | 1
|
2021-08-05T13:43:45.000Z
|
2021-08-05T13:43:45.000Z
|
forest/forestdata.py
|
gait-analyzer/GaitMonitoringForParkinsonsDiseasePatients
|
2064375ddc36bf38f3ff65f09e776328b8b4612a
|
[
"MIT"
] | null | null | null |
forest/forestdata.py
|
gait-analyzer/GaitMonitoringForParkinsonsDiseasePatients
|
2064375ddc36bf38f3ff65f09e776328b8b4612a
|
[
"MIT"
] | null | null | null |
import pandas as pd
def opsd():
return pd.read_csv('https://raw.githubusercontent.com/jenfly/opsd/master/opsd_germany_daily.csv', sep=",")
def presidents():
return pd.read_csv('https://sololearn.com/uploads/files/president_heights_party.csv', index_col='name')
def sacramento_crime():
return pd.read_csv('http://samplecsvs.s3.amazonaws.com/SacramentocrimeJanuary2006.csv')
def titanic():
return pd.read_csv('https://raw.githubusercontent.com/datasciencedojo/datasets/master/titanic.csv')
def titanic_test():
return pd.read_csv('https://raw.githubusercontent.com/justmarkham/pandas-videos/master/data/titanic_test.csv')
def titanic_train():
return pd.read_csv('https://raw.githubusercontent.com/justmarkham/pandas-videos/master/data/titanic_train.csv')
def kyphosis():
return pd.read_csv('https://raw.githubusercontent.com/raorao/datasciencedojo/master/Datasets/kyphosis.csv')
def hive_sample():
return pd.read_csv('https://raw.githubusercontent.com/olympiacos23/datasciencedojo/master/Datasets/HiveSampleData.csv')
def oxygen():
return pd.read_csv('https://raw.githubusercontent.com/selva86/datasets/master/oxygen.csv')
def ozone():
return pd.read_csv('https://raw.githubusercontent.com/selva86/datasets/master/ozone.csv')
def phone_transcripts():
return pd.read_csv('https://raw.githubusercontent.com/selva86/datasets/master/phone_transcripts.csv')
def text_classification():
return pd.read_csv('https://raw.githubusercontent.com/selva86/datasets/master/text_classfication.csv')
def tourists():
return pd.read_csv('https://raw.githubusercontent.com/selva86/datasets/master/tourists.csv')
def diamonds():
return pd.read_csv('https://raw.githubusercontent.com/selva86/datasets/master/diamonds.csv')
def life_expectancy():
return pd.read_csv('https://raw.githubusercontent.com/selva86/datasets/master/Life_Expectancy_Data.csv')
def yahoo():
return pd.read_csv('https://raw.githubusercontent.com/selva86/datasets/master/yahoo.csv')
def earthquake():
return pd.read_csv('https://raw.githubusercontent.com/plotly/datasets/master/earthquake.csv')
def flightdata():
return pd.read_csv('https://raw.githubusercontent.com/plotly/datasets/master/flightdata.csv')
def vortex():
return pd.read_csv('https://raw.githubusercontent.com/plotly/datasets/master/vortex.csv')
def USArrests():
return pd.read_csv('https://raw.githubusercontent.com/selva86/datasets/master/USArrests.csv')
def wage():
return pd.read_csv('https://raw.githubusercontent.com/selva86/datasets/master/Wage.csv')
def letter_recogniton():
return pd.read_csv('https://raw.githubusercontent.com/selva86/datasets/master/LetterRecognition.csv')
def ionosphere():
return pd.read_csv('https://raw.githubusercontent.com/selva86/datasets/master/Ionosphere.csv')
def glass():
return pd.read_csv('https://raw.githubusercontent.com/selva86/datasets/master/Glass.csv')
def german_credit():
return pd.read_csv('https://raw.githubusercontent.com/selva86/datasets/master/GermanCredit.csv')
def college():
return pd.read_csv('https://raw.githubusercontent.com/selva86/datasets/master/College.csv')
def tedtalks():
return pd.read_json('https://raw.githubusercontent.com/algolia/datasets/master/tedtalks/talks.json')
def movies():
return pd.read_json('https://raw.githubusercontent.com/algolia/datasets/master/movies/records.json')
def german_credit():
return pd.read_csv('https://raw.githubusercontent.com/selva86/datasets/master/GermanCredit.csv')
def pollution():
return pd.read_csv('https://raw.githubusercontent.com/jbrownlee/Datasets/master/pollution.csv')
def sonar():
return pd.read_csv('https://raw.githubusercontent.com/jbrownlee/Datasets/master/sonar.csv')
def adult():
return pd.read_csv('https://raw.githubusercontent.com/jbrownlee/Datasets/master/adult-all.csv')
def smsspam():
return pd.read_csv('https://raw.githubusercontent.com/stedy/Machine-Learning-with-R-datasets/master/sms_spam.csv')
def credit():
return pd.read_csv('https://raw.githubusercontent.com/stedy/Machine-Learning-with-R-datasets/master/credit.csv')
def concrete():
return pd.read_csv('https://raw.githubusercontent.com/stedy/Machine-Learning-with-R-datasets/master/concrete.csv')
def snsdata():
return pd.read_csv('https://raw.githubusercontent.com/stedy/Machine-Learning-with-R-datasets/master/snsdata.csv')
def insurance():
return pd.read_csv('https://raw.githubusercontent.com/stedy/Machine-Learning-with-R-datasets/master/insurance.csv')
def used_cars():
return pd.read_csv('https://raw.githubusercontent.com/stedy/Machine-Learning-with-R-datasets/master/usedcars.csv')
def movie_actors():
return pd.read_json('https://raw.githubusercontent.com/algolia/datasets/master/movies/actors.json')
def streeteasy():
return pd.read_csv('https://raw.githubusercontent.com/Codecademy/datasets/master/streeteasy/streeteasy.csv')
def streeteasy_brooklyn():
return pd.read_csv('https://raw.githubusercontent.com/Codecademy/datasets/master/streeteasy/brooklyn.csv')
def streeteasy_manhattan():
return pd.read_csv('https://raw.githubusercontent.com/Codecademy/datasets/master/streeteasy/manhattan.csv')
def streeteasy_queens():
return pd.read_csv('https://raw.githubusercontent.com/Codecademy/datasets/master/streeteasy/queens.csv')
def uiuc_gpa():
return pd.read_csv('https://raw.githubusercontent.com/wadefagen/datasets/master/gpa/uiuc-gpa-dataset.csv')
def uiuc_students():
return pd.read_csv('https://raw.githubusercontent.com/wadefagen/datasets/master/students-by-state/uiuc-students-by-state.csv')
def twonorm():
return pd.read_csv('https://raw.githubusercontent.com/mikeizbicki/datasets/master/csv/ida/twonorm_data.csv')
def waveform():
return pd.read_csv('https://raw.githubusercontent.com/mikeizbicki/datasets/master/csv/ida/waveform_data.csv')
def pima_indian_diabetes():
return pd.read_csv('https://raw.githubusercontent.com/mikeizbicki/datasets/master/csv/uci/pima-indians-diabetes.csv')
def ilpd():
return pd.read_csv('https://raw.githubusercontent.com/mikeizbicki/datasets/master/csv/uci/Indian%20Liver%20Patient%20Dataset%20(ILPD).csv')
def stocks():
return pd.read_csv('https://raw.githubusercontent.com/skathirmani/datasets/master/stock-prices.csv')
def movie_ratings():
return pd.read_csv('https://raw.githubusercontent.com/skathirmani/datasets/master/ratings.csv')
def restaurants():
return pd.read_csv('https://raw.githubusercontent.com/skathirmani/datasets/master/restaurants.csv')
def modi_tweets():
return pd.read_csv('https://raw.githubusercontent.com/skathirmani/datasets/master/narendramodi_tweets.csv')
def deliveries():
return pd.read_csv('https://raw.githubusercontent.com/skathirmani/datasets/master/deliveries.csv')
def ellipse():
return pd.read_csv('https://raw.githubusercontent.com/skathirmani/datasets/master/ellipse.csv')
def car_data():
return pd.read_csv('https://raw.githubusercontent.com/skathirmani/datasets/master/car_data.csv')
def delhi_2014():
return pd.read_csv('https://raw.githubusercontent.com/mwermelinger/Learn-to-code-for-data-analysis/master/2_Cleaning_up_our_act/Delhi_DEL_2014.csv')
def beijing_2014():
return pd.read_csv('https://raw.githubusercontent.com/mwermelinger/Learn-to-code-for-data-analysis/master/2_Cleaning_up_our_act/Beijing_PEK_2014.csv')
def brasilia_2014():
return pd.read_csv('https://raw.githubusercontent.com/mwermelinger/Learn-to-code-for-data-analysis/master/2_Cleaning_up_our_act/Brasilia_BSB_2014.csv')
def capetown_2014():
return pd.read_csv('https://raw.githubusercontent.com/mwermelinger/Learn-to-code-for-data-analysis/master/2_Cleaning_up_our_act/CapeTown_CPT_2014.csv')
def london_2014():
return pd.read_csv('https://raw.githubusercontent.com/mwermelinger/Learn-to-code-for-data-analysis/master/2_Cleaning_up_our_act/London_2014.csv')
def moscow_2014():
return pd.read_csv('https://raw.githubusercontent.com/mwermelinger/Learn-to-code-for-data-analysis/master/2_Cleaning_up_our_act/Moscow_SVO_2014.csv')
def WHO_POP_2014():
return pd.read_csv('https://raw.githubusercontent.com/mwermelinger/Learn-to-code-for-data-analysis/master/2_Cleaning_up_our_act/WHO%20POP%20TB%20all.csv')
def countries():
return pd.read_csv('https://raw.githubusercontent.com/arangodb/example-datasets/master/Countries/countries.csv')
def mcdonalds_france():
return pd.read_csv('https://raw.githubusercontent.com/arangodb/example-datasets/master/McDonalds/france.csv')
def regions():
return pd.read_csv('https://raw.githubusercontent.com/arangodb/example-datasets/master/Regions/regions.csv')
def airports():
return pd.read_csv('https://raw.githubusercontent.com/arangodb/example-datasets/master/Airports/airports.csv')
def bezirke():
return pd.read_csv('https://raw.githubusercontent.com/arangodb/example-datasets/master/Bezirke/bezirke.csv')
def GeoLiteCity():
return pd.read_csv('https://raw.githubusercontent.com/arangodb/example-datasets/master/Cities/GeoLiteCity.csv')
def coronavirus():
return pd.read_csv('https://raw.githubusercontent.com/RamiKrispin/coronavirus/master/csv/coronavirus.csv')
def covid_owid():
return pd.read_csv('https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/owid-covid-data.csv')
def population():
return pd.read_csv('https://raw.githubusercontent.com/rafikahmed/worldometers/master/population_dataset.csv')
def superhero():
return pd.read_csv('https://raw.githubusercontent.com/mafudge/datasets/master/superhero/superhero-movie-dataset-1978-2012.csv', header=0)
def marvel():
return pd.read_csv('https://raw.githubusercontent.com/fivethirtyeight/data/master/comic-characters/marvel-wikia-data.csv')
def dc():
return pd.read_csv('https://raw.githubusercontent.com/fivethirtyeight/data/master/comic-characters/dc-wikia-data.csv')
def usbirths94_03():
return pd.read_csv('https://raw.githubusercontent.com/fivethirtyeight/data/master/births/US_births_1994-2003_CDC_NCHS.csv')
def usbirths00_14():
return pd.read_csv('https://raw.githubusercontent.com/fivethirtyeight/data/master/births/US_births_2000-2014_SSA.csv')
def avengers():
return pd.read_csv('https://raw.githubusercontent.com/fivethirtyeight/data/master/avengers/avengers.csv')
def starwars():
return pd.read_csv('https://raw.githubusercontent.com/fivethirtyeight/data/master/star-wars-survey/StarWars.csv')
def tarantino():
return pd.read_csv('https://raw.githubusercontent.com/fivethirtyeight/data/master/tarantino/tarantino.csv')
def churn():
return pd.read_csv('https://raw.githubusercontent.com/albayraktaroglu/Datasets/master/churn.csv')
def shanghai_sp():
return pd.read_csv('https://raw.githubusercontent.com/zilinskyjan/datasets/master/china/shanghai_sp_correlation.csv')
def uber():
return pd.read_csv('https://raw.githubusercontent.com/ChitturiPadma/datasets/master/uber.csv')
def output_volatility():
return pd.read_csv('https://raw.githubusercontent.com/zilinskyjan/datasets/master/blogs/output%20volatility%20dataset.csv')
def eurobarometer():
return pd.read_csv('https://raw.githubusercontent.com/zilinskyjan/datasets/master/economic_sentiment/eurobarometer.csv')
def growth_gdp_public_spending():
return pd.read_csv('https://raw.githubusercontent.com/zilinskyjan/datasets/master/fiscal/growht_gdp_public_spending.csv')
def fires():
return pd.read_csv('https://raw.githubusercontent.com/datanews/track-fires/master/track-fires.csv')
def illegal_hotel_inspections():
return pd.read_csv('https://raw.githubusercontent.com/datanews/illegal-hotel-inspections/master/inspections.csv')
def fashion_mnist():
return pd.read_csv('https://raw.githubusercontent.com/trekhleb/homemade-machine-learning/master/data/fashion-mnist-demo.csv')
def mnist():
return pd.read_csv('https://raw.githubusercontent.com/trekhleb/homemade-machine-learning/master/data/mnist-demo.csv')
def whr2017():
return pd.read_csv('https://raw.githubusercontent.com/trekhleb/homemade-machine-learning/master/data/world-happiness-report-2017.csv')
class LicensePlates:
@staticmethod
def accepted_plates():
return pd.read_csv('https://raw.githubusercontent.com/datanews/license-plates/master/accepted-plates.csv')
@staticmethod
def rejected_plates():
return pd.read_csv('https://raw.githubusercontent.com/datanews/license-plates/master/rejected-plates.csv')
def ufo():
return pd.read_csv('https://raw.githubusercontent.com/justmarkham/pandas-videos/master/data/ufo.csv')
def chipotle():
return pd.read_csv('https://raw.githubusercontent.com/justmarkham/pandas-videos/master/data/chipotle.tsv')
def imdb_1000():
return pd.read_csv('https://raw.githubusercontent.com/justmarkham/pandas-videos/master/data/imdb_1000.csv')
class MovieLens:
@staticmethod
def users():
user_cols = ['user_id', 'age', 'gender', 'occupation', 'zip_code']
return pd.read_table('http://bit.ly/movieusers', sep='|', header=None, names=user_cols)
@staticmethod
def movies():
movie_cols = ['movie_id', 'title']
return pd.read_table("https://raw.githubusercontent.com/justmarkham/pandas-videos/master/data/u.item", sep='|', header=None, names=movie_cols, usecols=[0, 1])
@staticmethod
def ratings():
rating_cols = ['user_id', 'movie_id', 'rating', 'timestamp']
ratings = pd.read_table('https://raw.githubusercontent.com/justmarkham/pandas-videos/master/data/u.data', sep='\t', header=None, names=rating_cols)
def drinks():
return pd.read_csv('http://bit.ly/drinksbycountry')
| 44.576433
| 167
| 0.756448
| 1,851
| 13,997
| 5.598055
| 0.158293
| 0.057904
| 0.11465
| 0.259409
| 0.668983
| 0.663385
| 0.663385
| 0.663385
| 0.621888
| 0.611465
| 0
| 0.013357
| 0.096021
| 13,997
| 313
| 168
| 44.71885
| 0.80558
| 0
| 0
| 0.052133
| 0
| 0.222749
| 0.629394
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.473934
| false
| 0
| 0.004739
| 0.459716
| 0.957346
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
737194884a6ca04113a34ad72071369ed926d929
| 261
|
py
|
Python
|
bin/train_extractive_qa.py
|
tombosc/dict_based_learning
|
c5e622d6de43ee6d892f8bccedc0a43a2f02a284
|
[
"MIT"
] | 6
|
2017-06-08T18:20:19.000Z
|
2019-01-01T09:56:51.000Z
|
bin/train_extractive_qa.py
|
tombosc/dict_based_learning
|
c5e622d6de43ee6d892f8bccedc0a43a2f02a284
|
[
"MIT"
] | null | null | null |
bin/train_extractive_qa.py
|
tombosc/dict_based_learning
|
c5e622d6de43ee6d892f8bccedc0a43a2f02a284
|
[
"MIT"
] | 3
|
2017-06-09T07:34:49.000Z
|
2020-08-06T07:33:36.000Z
|
#!/usr/bin/env python
from dictlearn.extractive_qa_training import train_extractive_qa
from dictlearn.extractive_qa_configs import qa_config_registry
from dictlearn.main import main
if __name__ == "__main__":
main(qa_config_registry, train_extractive_qa)
| 29
| 64
| 0.83908
| 37
| 261
| 5.378378
| 0.459459
| 0.241206
| 0.231156
| 0.251256
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.099617
| 261
| 8
| 65
| 32.625
| 0.846809
| 0.076628
| 0
| 0
| 0
| 0
| 0.033333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
738821e78dcfa3a3b8852942ca104823b2c1a308
| 4,427
|
py
|
Python
|
tests/storage.py
|
zodman/django-constance
|
dec2322262680dece25e0a1c5cada4f19aa86d4b
|
[
"BSD-3-Clause"
] | 6
|
2015-12-13T10:49:02.000Z
|
2021-06-14T14:35:31.000Z
|
tests/storage.py
|
zodman/django-constance
|
dec2322262680dece25e0a1c5cada4f19aa86d4b
|
[
"BSD-3-Clause"
] | null | null | null |
tests/storage.py
|
zodman/django-constance
|
dec2322262680dece25e0a1c5cada4f19aa86d4b
|
[
"BSD-3-Clause"
] | 10
|
2016-04-23T21:16:30.000Z
|
2020-05-13T08:06:22.000Z
|
# -*- encoding: utf-8 -*-
from datetime import datetime, date, time, timedelta
from decimal import Decimal
from django.utils import six
from constance.base import Config
if six.PY3:
def long(value):
return value
class StorageTestsMixin(object):
def setUp(self):
self.config = Config()
super(StorageTestsMixin, self).setUp()
def test_store(self):
self.assertEqual(self.config.INT_VALUE, 1)
self.assertEqual(self.config.LONG_VALUE, long(123456))
self.assertEqual(self.config.BOOL_VALUE, True)
self.assertEqual(self.config.STRING_VALUE, 'Hello world')
self.assertEqual(self.config.UNICODE_VALUE, u'Rivière-Bonjour რუსთაველი')
self.assertEqual(self.config.DECIMAL_VALUE, Decimal('0.1'))
self.assertEqual(self.config.DATETIME_VALUE, datetime(2010, 8, 23, 11, 29, 24))
self.assertEqual(self.config.FLOAT_VALUE, 3.1415926536)
self.assertEqual(self.config.DATE_VALUE, date(2010, 12, 24))
self.assertEqual(self.config.TIME_VALUE, time(23, 59, 59))
self.assertEqual(self.config.TIMEDELTA_VALUE, timedelta(days=1, hours=2, minutes=3))
self.assertEqual(self.config.CHOICE_VALUE, 'yes')
self.assertEqual(self.config.EMAIL_VALUE, 'test@example.com')
# set values
self.config.INT_VALUE = 100
self.config.LONG_VALUE = long(654321)
self.config.BOOL_VALUE = False
self.config.STRING_VALUE = 'Beware the weeping angel'
self.config.UNICODE_VALUE = u'Québec'
self.config.DECIMAL_VALUE = Decimal('1.2')
self.config.DATETIME_VALUE = datetime(1977, 10, 2)
self.config.FLOAT_VALUE = 2.718281845905
self.config.DATE_VALUE = date(2001, 12, 20)
self.config.TIME_VALUE = time(1, 59, 0)
self.config.TIMEDELTA_VALUE = timedelta(days=2, hours=3, minutes=4)
self.config.CHOICE_VALUE = 'no'
self.config.EMAIL_VALUE = 'foo@bar.com'
# read again
self.assertEqual(self.config.INT_VALUE, 100)
self.assertEqual(self.config.LONG_VALUE, long(654321))
self.assertEqual(self.config.BOOL_VALUE, False)
self.assertEqual(self.config.STRING_VALUE, 'Beware the weeping angel')
self.assertEqual(self.config.UNICODE_VALUE, u'Québec')
self.assertEqual(self.config.DECIMAL_VALUE, Decimal('1.2'))
self.assertEqual(self.config.DATETIME_VALUE, datetime(1977, 10, 2))
self.assertEqual(self.config.FLOAT_VALUE, 2.718281845905)
self.assertEqual(self.config.DATE_VALUE, date(2001, 12, 20))
self.assertEqual(self.config.TIME_VALUE, time(1, 59, 0))
self.assertEqual(self.config.TIMEDELTA_VALUE, timedelta(days=2, hours=3, minutes=4))
self.assertEqual(self.config.CHOICE_VALUE, 'no')
self.assertEqual(self.config.EMAIL_VALUE, 'foo@bar.com')
def test_nonexistent(self):
try:
self.config.NON_EXISTENT
except Exception as e:
self.assertEqual(type(e), AttributeError)
try:
self.config.NON_EXISTENT = 1
except Exception as e:
self.assertEqual(type(e), AttributeError)
def test_missing_values(self):
# set some values and leave out others
self.config.LONG_VALUE = long(654321)
self.config.BOOL_VALUE = False
self.config.UNICODE_VALUE = u'Québec'
self.config.DECIMAL_VALUE = Decimal('1.2')
self.config.DATETIME_VALUE = datetime(1977, 10, 2)
self.config.DATE_VALUE = date(2001, 12, 20)
self.config.TIME_VALUE = time(1, 59, 0)
self.assertEqual(self.config.INT_VALUE, 1) # this should be the default value
self.assertEqual(self.config.LONG_VALUE, long(654321))
self.assertEqual(self.config.BOOL_VALUE, False)
self.assertEqual(self.config.STRING_VALUE, 'Hello world') # this should be the default value
self.assertEqual(self.config.UNICODE_VALUE, u'Québec')
self.assertEqual(self.config.DECIMAL_VALUE, Decimal('1.2'))
self.assertEqual(self.config.DATETIME_VALUE, datetime(1977, 10, 2))
self.assertEqual(self.config.FLOAT_VALUE, 3.1415926536) # this should be the default value
self.assertEqual(self.config.DATE_VALUE, date(2001, 12, 20))
self.assertEqual(self.config.TIME_VALUE, time(1, 59, 0))
self.assertEqual(self.config.TIMEDELTA_VALUE, timedelta(days=1, hours=2, minutes=3))
| 45.639175
| 101
| 0.678789
| 587
| 4,427
| 5.011925
| 0.178876
| 0.203943
| 0.238953
| 0.314412
| 0.852141
| 0.834126
| 0.755948
| 0.627124
| 0.588715
| 0.53535
| 0
| 0.060709
| 0.20375
| 4,427
| 96
| 102
| 46.114583
| 0.773901
| 0.040885
| 0
| 0.531646
| 0
| 0
| 0.042237
| 0
| 0
| 0
| 0
| 0
| 0.493671
| 1
| 0.063291
| false
| 0
| 0.050633
| 0.012658
| 0.139241
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
73988d97ccc18affce0a0738f261d936ceb3c063
| 435
|
py
|
Python
|
bugtests/test130.py
|
doom38/jython_v2.2.1
|
0803a0c953c294e6d14f9fc7d08edf6a3e630a15
|
[
"CNRI-Jython"
] | null | null | null |
bugtests/test130.py
|
doom38/jython_v2.2.1
|
0803a0c953c294e6d14f9fc7d08edf6a3e630a15
|
[
"CNRI-Jython"
] | null | null | null |
bugtests/test130.py
|
doom38/jython_v2.2.1
|
0803a0c953c294e6d14f9fc7d08edf6a3e630a15
|
[
"CNRI-Jython"
] | null | null | null |
"""
Comparing ints and strings
"""
import support
if -1 > 'a':
raise support.TestError("-1 > 'a'")
if not -1 < 'a':
raise support.TestError("-1 < 'a'")
if 4 > 'a':
raise support.TestError("4 > 'a'")
if not 4 < 'a':
raise support.TestError("4 < 'a'")
if -2 > 'a':
raise support.TestError("-2 > 'a'")
if not -2 < 'a':
raise support.TestError("-2 < 'a'")
if -1 == 'a':
raise support.TestError("-1 == 'a'")
| 18.913043
| 40
| 0.537931
| 65
| 435
| 3.6
| 0.215385
| 0.179487
| 0.388889
| 0.65812
| 0.807692
| 0.807692
| 0.807692
| 0.807692
| 0
| 0
| 0
| 0.041667
| 0.227586
| 435
| 22
| 41
| 19.772727
| 0.654762
| 0.05977
| 0
| 0
| 0
| 0
| 0.155388
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.066667
| 0
| 0.066667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
73bd67d6b0efedcd448a5128f7d1380618d5fba5
| 44,715
|
py
|
Python
|
import_export_ballotpedia/views_admin.py
|
mattare2/WeVoteServer
|
c12f1a4a64a94a3b22f97c9582ed1058749326d5
|
[
"MIT"
] | null | null | null |
import_export_ballotpedia/views_admin.py
|
mattare2/WeVoteServer
|
c12f1a4a64a94a3b22f97c9582ed1058749326d5
|
[
"MIT"
] | null | null | null |
import_export_ballotpedia/views_admin.py
|
mattare2/WeVoteServer
|
c12f1a4a64a94a3b22f97c9582ed1058749326d5
|
[
"MIT"
] | null | null | null |
# import_export_ballotpedia/views_admin.py
# Brought to you by We Vote. Be good.
# -*- coding: UTF-8 -*-
from .controllers import attach_ballotpedia_election_by_district_from_api, \
retrieve_ballot_items_from_polling_location, \
retrieve_ballotpedia_candidates_by_district_from_api, retrieve_ballotpedia_measures_by_district_from_api, \
retrieve_ballotpedia_district_id_list_for_polling_location, retrieve_ballotpedia_offices_by_district_from_api
# retrieve_ballotpedia_offices_by_election_from_api
from admin_tools.views import redirect_to_sign_in_page
from config.base import get_environment_variable
from datetime import date
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.contrib.messages import get_messages
from django.core.urlresolvers import reverse
from django.db.models import Q
from django.http import HttpResponseRedirect
from django.shortcuts import redirect, render
from election.models import Election, ElectionManager
from import_export_batches.models import BatchSet, BATCH_SET_SOURCE_IMPORT_BALLOTPEDIA_BALLOT_ITEMS
from polling_location.models import PollingLocation
from voter.models import voter_has_authority
import wevote_functions.admin
from wevote_functions.functions import convert_to_int, is_valid_state_code, positive_value_exists
logger = wevote_functions.admin.get_logger(__name__)
BALLOTPEDIA_API_CONTAINS_URL = get_environment_variable("BALLOTPEDIA_API_CONTAINS_URL")
CANDIDATE = 'CANDIDATE'
CONTEST_OFFICE = 'CONTEST_OFFICE'
ELECTED_OFFICE = 'ELECTED_OFFICE'
IMPORT_BALLOT_ITEM = 'IMPORT_BALLOT_ITEM'
IMPORT_VOTER = 'IMPORT_VOTER'
MEASURE = 'MEASURE'
POLITICIAN = 'POLITICIAN'
@login_required
def import_ballot_items_for_location_view(request):
    """
    Reach out to Ballotpedia API to retrieve a short list of districts the voter can vote in.

    GET params: google_civic_election_id, polling_location_we_vote_id, state_code.
    On success redirects to the newly created import batch; otherwise falls back to the
    ballot-item edit page for the polling location (or the election list when the
    polling location id is missing).
    """
    # admin, partner_organization, political_data_manager, political_data_viewer, verified_volunteer
    authority_required = {'political_data_manager'}
    if not voter_has_authority(request, authority_required):
        return redirect_to_sign_in_page(request, authority_required)
    google_civic_election_id = convert_to_int(request.GET.get('google_civic_election_id', 0))
    polling_location_we_vote_id = request.GET.get('polling_location_we_vote_id', "")
    state_code = request.GET.get('state_code', "")
    # Delegate the actual Ballotpedia retrieval + batch creation to the controller
    results = retrieve_ballot_items_from_polling_location(
        google_civic_election_id, polling_location_we_vote_id, state_code=state_code)
    kind_of_batch = ""
    if 'kind_of_batch' in results:
        kind_of_batch = results['kind_of_batch']
    if not positive_value_exists(kind_of_batch):
        # Fall back to a sensible default batch type when the controller did not set one
        kind_of_batch = IMPORT_BALLOT_ITEM
    batch_header_id = 0
    if 'batch_saved' in results and results['batch_saved']:
        messages.add_message(request, messages.INFO, 'Ballot items import batch for {google_civic_election_id} '
                                                     'election saved.'
                                                     ''.format(google_civic_election_id=google_civic_election_id))
        batch_header_id = results['batch_header_id']
    elif 'batch_header_id' in results and results['batch_header_id']:
        # Batch was not freshly saved, but a batch_header_id exists -- still usable for redirect
        messages.add_message(request, messages.INFO, 'Ballot items import batch for {google_civic_election_id} '
                                                     'election saved, batch_header_id.'
                                                     ''.format(google_civic_election_id=google_civic_election_id))
        batch_header_id = results['batch_header_id']
    else:
        messages.add_message(request, messages.ERROR, results['status'])
    if positive_value_exists(batch_header_id):
        # Go straight to the new batch
        return HttpResponseRedirect(reverse('import_export_batches:batch_action_list', args=()) +
                                    "?batch_header_id=" + str(batch_header_id) +
                                    "&kind_of_batch=" + str(kind_of_batch) +
                                    "&google_civic_election_id=" + str(google_civic_election_id))
    else:
        # Go to the ballot_item_list_edit page
        if positive_value_exists(polling_location_we_vote_id):
            return HttpResponseRedirect(reverse('ballot:ballot_item_list_by_polling_location_edit',
                                                args=(polling_location_we_vote_id,)) +
                                        "?google_civic_election_id=" + str(google_civic_election_id) +
                                        "&polling_location_we_vote_id=" + str(polling_location_we_vote_id) +
                                        "&state_code=" + str(state_code)
                                        )
        else:
            messages.add_message(request, messages.ERROR, "Missing polling_location_we_vote_id.")
            return HttpResponseRedirect(reverse('election:election_list', args=()) +
                                        "?google_civic_election_id=" + str(google_civic_election_id) +
                                        "&polling_location_we_vote_id=" + str(polling_location_we_vote_id) +
                                        "&state_code=" + str(state_code)
                                        )
@login_required
def import_export_ballotpedia_index_view(request):
    """Render the Ballotpedia import/export landing page (for We Vote data maintenance)."""
    context = {
        'messages_on_stage': get_messages(request),
    }
    return render(request, 'import_export_ballotpedia/index.html', context)
@login_required
def attach_ballotpedia_election_view(request, election_local_id=0):
    """
    Reach out to Ballotpedia and retrieve the details about this election needed to make other API calls.
    :param request:
    :param election_local_id: local (We Vote) primary key of the Election to attach Ballotpedia data to
    :return: redirect to the election summary page (or the election list on lookup failure)
    """
    # admin, partner_organization, political_data_manager, political_data_viewer, verified_volunteer
    authority_required = {'political_data_manager'}
    if not voter_has_authority(request, authority_required):
        return redirect_to_sign_in_page(request, authority_required)
    state_code = request.GET.get('state_code', '')
    force_district_retrieve_from_ballotpedia = request.GET.get('force_district_retrieve_from_ballotpedia', False)
    polling_location_list = []
    status = ""
    try:
        election_on_stage = Election.objects.get(id=election_local_id)
        google_civic_election_id = election_on_stage.google_civic_election_id
        election_state_code = election_on_stage.get_election_state()
        election_name = election_on_stage.election_name
        is_national_election = election_on_stage.is_national_election
    except Election.MultipleObjectsReturned as e:
        messages.add_message(request, messages.ERROR,
                             'Could not retrieve election data. More than one election found.')
        return HttpResponseRedirect(reverse('election:election_list', args=()))
    except Election.DoesNotExist:
        messages.add_message(request, messages.ERROR,
                             'Could not retrieve election data. Election could not be found.')
        return HttpResponseRedirect(reverse('election:election_list', args=()))
    # Check to see if we have polling location data related to the region(s) covered by this election
    # We request the ballot data for each polling location as a way to build up our local data
    if not positive_value_exists(state_code) and positive_value_exists(google_civic_election_id):
        state_code = election_state_code
    if positive_value_exists(is_national_election) and not positive_value_exists(state_code):
        messages.add_message(request, messages.ERROR,
                             'For National elections, a State Code is required in order to run any '
                             'Ballotpedia data preparation.')
        return HttpResponseRedirect(reverse('election:election_summary', args=(election_local_id,)))
    if not is_valid_state_code(state_code):
        messages.add_message(request, messages.ERROR,
                             '{state_code} is not a valid State Code'.format(state_code=state_code))
        return HttpResponseRedirect(reverse('election:election_summary', args=(election_local_id,)))
    try:
        # Only consider polling locations with usable geocoding (latitude) and zip data
        polling_location_count_query = PollingLocation.objects.all()
        polling_location_count_query = polling_location_count_query.filter(state__iexact=state_code)
        polling_location_count_query = polling_location_count_query.filter(polling_location_deleted=False)
        polling_location_count_query = polling_location_count_query.exclude(
            Q(latitude__isnull=True) | Q(latitude__exact=0.0))
        polling_location_count_query = polling_location_count_query.exclude(
            Q(zip_long__isnull=True) | Q(zip_long__exact='0') | Q(zip_long__exact=''))
        polling_location_count = polling_location_count_query.count()
        if positive_value_exists(polling_location_count):
            # Same filters as the count query above; capped at 1000 locations per run
            polling_location_query = PollingLocation.objects.all()
            polling_location_query = polling_location_query.filter(state__iexact=state_code)
            polling_location_query = polling_location_query.filter(polling_location_deleted=False)
            polling_location_query = polling_location_query.exclude(
                Q(latitude__isnull=True) | Q(latitude__exact=0.0))
            polling_location_query = polling_location_query.exclude(
                Q(zip_long__isnull=True) | Q(zip_long__exact='0') | Q(zip_long__exact=''))
            # Ordering by "location_name" creates a bit of (locational) random order
            polling_location_list = polling_location_query.order_by('location_name')[:1000]
    except PollingLocation.DoesNotExist:
        messages.add_message(request, messages.INFO,
                             'Could not retrieve polling location data for the {election_name}. '
                             'No polling locations exist for the state \'{state}\'. '
                             'Data needed from VIP.'.format(
                                 election_name=election_name,
                                 state=state_code))
        return HttpResponseRedirect(reverse('election:election_summary', args=(election_local_id,)) +
                                    "?state_code=" + str(state_code))
    if polling_location_count == 0:
        messages.add_message(request, messages.ERROR,
                             'Could not retrieve ballot data for the {election_name}. '
                             'No polling locations returned for the state \'{state}\'. (error 2)'.format(
                                 election_name=election_name,
                                 state=state_code))
        return HttpResponseRedirect(reverse('election:election_summary', args=(election_local_id,)) +
                                    "?state_code=" + str(state_code))
    # If here, we know that we have some polling_locations to use in order to retrieve ballotpedia districts
    could_not_retrieve_district_id_list_for_polling_location_count = 0
    merged_district_list = []
    for polling_location in polling_location_list:
        one_ballot_results = retrieve_ballotpedia_district_id_list_for_polling_location(
            google_civic_election_id, polling_location=polling_location,
            force_district_retrieve_from_ballotpedia=force_district_retrieve_from_ballotpedia)
        if one_ballot_results['success']:
            ballotpedia_district_id_list = one_ballot_results['ballotpedia_district_id_list']
            if len(ballotpedia_district_id_list):
                for one_ballotpedia_district_id in ballotpedia_district_id_list:
                    if one_ballotpedia_district_id not in merged_district_list:
                        # Build up a list of ballotpedia districts that we need to retrieve races for
                        merged_district_list.append(one_ballotpedia_district_id)
        else:
            could_not_retrieve_district_id_list_for_polling_location_count += 1
    if positive_value_exists(could_not_retrieve_district_id_list_for_polling_location_count):
        messages.add_message(request, messages.ERROR,
                             'Could not retrieve district_id list for this many Polling Locations: ' +
                             str(could_not_retrieve_district_id_list_for_polling_location_count))
    # Once we have a summary of all ballotpedia districts, we want to request all of the races
    if not len(merged_district_list):
        messages.add_message(request, messages.ERROR,
                             'Could not find Ballotpedia districts. ')
        return HttpResponseRedirect(reverse('election:election_summary', args=(election_local_id,)) +
                                    '?google_civic_election_id=' + str(google_civic_election_id) +
                                    "&state_code=" + str(state_code))
    results = attach_ballotpedia_election_by_district_from_api(election_on_stage, google_civic_election_id,
                                                               merged_district_list, state_code)
    status += results['status']
    # We limit the number of status characters we print to the screen to 1000 so we don't get
    # the error "Not all temporary messages could be stored."
    status = status[:1000]
    if positive_value_exists(results['election_found']):
        messages.add_message(request, messages.INFO,
                             'Ballotpedia election information attached. status: {status} '.format(status=status))
    else:
        messages.add_message(request, messages.ERROR,
                             'Ballotpedia election information not attached. status: {status} '
                             .format(status=status))
    return HttpResponseRedirect(reverse('election:election_summary', args=(election_local_id,)) +
                                '?google_civic_election_id=' + str(google_civic_election_id) +
                                '&state_code=' + str(state_code))
@login_required
def refresh_ballotpedia_districts_for_polling_locations_view(request):
    """
    This function refreshes the Ballotpedia districts used with subsequent calls to Ballotpedia:
    1) Retrieve (internally) polling locations (so we can use those addresses to retrieve a
    representative set of ballots)
    2) Cycle through a portion of those polling locations, enough that we are caching all of the possible ballot items
    3) Ask for Ballotpedia districts for each of the polling locations being analyzed
    GET params: google_civic_election_id, state_code (required), import_limit (default 500).
    :param request:
    :return: redirect to the electoral district list page
    """
    # admin, partner_organization, political_data_manager, political_data_viewer, verified_volunteer
    authority_required = {'political_data_manager'}
    if not voter_has_authority(request, authority_required):
        return redirect_to_sign_in_page(request, authority_required)
    google_civic_election_id = convert_to_int(request.GET.get('google_civic_election_id', 0))
    state_code = request.GET.get('state_code', '')
    import_limit = convert_to_int(request.GET.get('import_limit', 500))
    polling_location_list = []
    polling_location_count = 0
    status = ""
    if not positive_value_exists(state_code):
        messages.add_message(request, messages.ERROR,
                             'Could not retrieve Ballotpedia data. Missing state_code.')
        return HttpResponseRedirect(reverse('electoral_district:electoral_district_list', args=()))
    try:
        # First preference: polling locations explicitly flagged for bulk retrieval
        polling_location_count_query = PollingLocation.objects.all()
        polling_location_count_query = polling_location_count_query.filter(state__iexact=state_code)
        polling_location_count_query = polling_location_count_query.filter(use_for_bulk_retrieve=True)
        polling_location_count_query = polling_location_count_query.filter(polling_location_deleted=False)
        polling_location_count = polling_location_count_query.count()
        if positive_value_exists(polling_location_count):
            polling_location_query = PollingLocation.objects.all()
            polling_location_query = polling_location_query.filter(state__iexact=state_code)
            polling_location_query = polling_location_query.filter(use_for_bulk_retrieve=True)
            polling_location_query = polling_location_query.filter(polling_location_deleted=False)
            # We used to have a limit of 500 ballots to pull per election, but now retrieve all
            # Ordering by "location_name" creates a bit of (locational) random order
            polling_location_list = polling_location_query.order_by('location_name')[:import_limit]
    except Exception as e:
        status += "ELECTORAL_DISTRICT-COULD_NOT_FIND_POLLING_LOCATION_LIST " + str(e) + " "
    if polling_location_count == 0:
        # We didn't find any polling locations marked for bulk retrieve, so just retrieve up to the import_limit
        try:
            # Only polling locations with usable geocoding (latitude) and zip data
            polling_location_count_query = PollingLocation.objects.all()
            polling_location_count_query = \
                polling_location_count_query.exclude(Q(latitude__isnull=True) | Q(latitude__exact=0.0))
            polling_location_count_query = \
                polling_location_count_query.exclude(Q(zip_long__isnull=True) | Q(zip_long__exact='0') |
                                                     Q(zip_long__exact=''))
            polling_location_count_query = polling_location_count_query.filter(state__iexact=state_code)
            polling_location_count_query = polling_location_count_query.filter(polling_location_deleted=False)
            polling_location_count = polling_location_count_query.count()
            if positive_value_exists(polling_location_count):
                polling_location_query = PollingLocation.objects.all()
                polling_location_query = \
                    polling_location_query.exclude(Q(latitude__isnull=True) | Q(latitude__exact=0.0))
                polling_location_query = \
                    polling_location_query.exclude(Q(zip_long__isnull=True) | Q(zip_long__exact='0') |
                                                   Q(zip_long__exact=''))
                polling_location_query = polling_location_query.filter(state__iexact=state_code)
                polling_location_query = polling_location_query.filter(polling_location_deleted=False)
                # Ordering by "location_name" creates a bit of (locational) random order
                polling_location_list = polling_location_query.order_by('location_name')[:import_limit]
        except PollingLocation.DoesNotExist:
            messages.add_message(request, messages.INFO,
                                 'Could not retrieve ballot data. '
                                 'No polling locations exist for the state \'{state}\'. '
                                 'Data needed from VIP.'.format(
                                     state=state_code))
            return HttpResponseRedirect(reverse('electoral_district:electoral_district_list', args=()))
    if polling_location_count == 0:
        messages.add_message(request, messages.ERROR,
                             'Could not retrieve ballot data. '
                             'No polling locations returned for the state \'{state}\'. (error 2)'.format(
                                 state=state_code))
        return HttpResponseRedirect(reverse('electoral_district:electoral_district_list', args=()))
    # If here, we know that we have some polling_locations to use in order to retrieve ballotpedia districts
    # Step though our set of polling locations, until we find one that contains a ballot. Some won't contain ballots
    # due to data quality issues.
    polling_locations_with_data = 0
    polling_locations_without_data = 0
    # If here we just want to retrieve the races for this election
    merged_district_list = []
    # NOTE(review): the google_civic_election_id read from the querystring is overwritten with 0 here,
    # so the district retrieval below and the redirect at the end always use 0 -- confirm this is intended
    google_civic_election_id = 0
    force_district_retrieve_from_ballotpedia = True
    for polling_location in polling_location_list:
        one_ballot_results = retrieve_ballotpedia_district_id_list_for_polling_location(
            google_civic_election_id, polling_location=polling_location,
            force_district_retrieve_from_ballotpedia=force_district_retrieve_from_ballotpedia)
        success = False
        if one_ballot_results['success']:
            success = True
            ballotpedia_district_id_list = one_ballot_results['ballotpedia_district_id_list']
            if len(ballotpedia_district_id_list):
                for one_ballotpedia_district_id in ballotpedia_district_id_list:
                    if one_ballotpedia_district_id not in merged_district_list:
                        # Build up a list of ballotpedia districts that we need to retrieve races for
                        merged_district_list.append(one_ballotpedia_district_id)
        if success:
            polling_locations_with_data += 1
        else:
            polling_locations_without_data += 1
    messages.add_message(request, messages.INFO,
                         'Electoral data retrieved from Ballotpedia. '
                         'polling_locations_with_data: {polling_locations_with_data}, '
                         'polling_locations_without_data: {polling_locations_without_data}. '
                         ''.format(
                             polling_locations_with_data=polling_locations_with_data,
                             polling_locations_without_data=polling_locations_without_data))
    return HttpResponseRedirect(reverse('electoral_district:electoral_district_list', args=()) +
                                '?state_code=' + str(state_code) +
                                '&google_civic_election_id=' + str(google_civic_election_id))
@login_required
def retrieve_ballotpedia_candidates_by_district_from_api_view(request):
    """
    Reach out to Ballotpedia API to retrieve candidates.

    Redirects to the new import batch when one was created, otherwise to the office list.
    """
    # admin, partner_organization, political_data_manager, political_data_viewer, verified_volunteer
    authority_required = {'political_data_manager'}
    if not voter_has_authority(request, authority_required):
        return redirect_to_sign_in_page(request, authority_required)
    google_civic_election_id = convert_to_int(request.GET.get('google_civic_election_id', 0))
    only_retrieve_if_zero_candidates = request.GET.get('only_retrieve_if_zero_candidates', False)
    state_code = request.GET.get('state_code', "")
    # Look up the election so we can enforce the state-code requirement for national elections
    election_local_id = 0
    is_national_election = False
    election_results = ElectionManager().retrieve_election(google_civic_election_id)
    if election_results['election_found']:
        matched_election = election_results['election']
        election_local_id = matched_election.id
        is_national_election = matched_election.is_national_election
    if positive_value_exists(is_national_election) and not positive_value_exists(state_code):
        messages.add_message(request, messages.ERROR,
                             'For National elections, a State Code is required in order to run any '
                             'Ballotpedia data preparation.')
        return HttpResponseRedirect(reverse('election:election_summary', args=(election_local_id,)))
    results = retrieve_ballotpedia_candidates_by_district_from_api(google_civic_election_id, state_code,
                                                                   only_retrieve_if_zero_candidates)
    kind_of_batch = results.get('kind_of_batch', "")
    if not positive_value_exists(kind_of_batch):
        kind_of_batch = CANDIDATE
    batch_header_id = 0
    if results.get('batch_saved'):
        messages.add_message(request, messages.INFO, 'Import batch for {google_civic_election_id} election saved.'
                                                     ''.format(google_civic_election_id=google_civic_election_id))
        batch_header_id = results['batch_header_id']
    elif results.get('batch_header_id'):
        messages.add_message(request, messages.INFO, 'Import batch for {google_civic_election_id} election saved, '
                                                     'batch_header_id.'
                                                     ''.format(google_civic_election_id=google_civic_election_id))
        batch_header_id = results['batch_header_id']
    else:
        messages.add_message(request, messages.ERROR, results['status'])
    if positive_value_exists(batch_header_id):
        # Jump straight to the freshly created batch
        return HttpResponseRedirect(reverse('import_export_batches:batch_action_list', args=()) +
                                    "?batch_header_id=" + str(batch_header_id) +
                                    "&kind_of_batch=" + str(kind_of_batch) +
                                    "&google_civic_election_id=" + str(google_civic_election_id))
    # No batch was created -- fall back to the office listing page
    return HttpResponseRedirect(reverse('office:office_list', args=()) +
                                "?google_civic_election_id=" + str(google_civic_election_id))
@login_required
def retrieve_ballotpedia_data_for_polling_locations_view(request, election_local_id=0):
    """
    Reach out to Ballotpedia and retrieve (for one election):
    1) Polling locations (so we can use those addresses to retrieve a representative set of ballots)
    2) Cycle through a portion of those polling locations, enough that we are caching all of the possible ballot items

    GET params: state_code, retrieve_races, retrieve_measures, force_district_retrieve_from_ballotpedia,
    import_limit (default 500).
    :param request:
    :param election_local_id: local (We Vote) primary key of the Election being processed
    :return: redirect to the batch detail, office/measure list, batch set list, or election pages
    """
    # admin, partner_organization, political_data_manager, political_data_viewer, verified_volunteer
    authority_required = {'political_data_manager'}
    if not voter_has_authority(request, authority_required):
        return redirect_to_sign_in_page(request, authority_required)
    force_district_retrieve_from_ballotpedia = request.GET.get('force_district_retrieve_from_ballotpedia', False)
    state_code = request.GET.get('state_code', '')
    retrieve_races = positive_value_exists(request.GET.get('retrieve_races', False))
    retrieve_measures = positive_value_exists(request.GET.get('retrieve_measures', False))
    import_limit = convert_to_int(request.GET.get('import_limit', 500))
    polling_location_list = []
    polling_location_count = 0
    status = ""
    try:
        if positive_value_exists(election_local_id):
            election_on_stage = Election.objects.get(id=election_local_id)
            ballotpedia_election_id = election_on_stage.ballotpedia_election_id
            google_civic_election_id = election_on_stage.google_civic_election_id
            election_state_code = election_on_stage.get_election_state()
            election_name = election_on_stage.election_name
            is_national_election = election_on_stage.is_national_election
        else:
            messages.add_message(request, messages.ERROR,
                                 'Could not retrieve Ballotpedia data. Missing election_local_id.')
            return HttpResponseRedirect(reverse('election:election_list', args=()))
    except Election.MultipleObjectsReturned as e:
        messages.add_message(request, messages.ERROR, 'Could not retrieve Ballotpedia data. '
                                                      'More than one election found.')
        return HttpResponseRedirect(reverse('election:election_list', args=()))
    except Election.DoesNotExist:
        messages.add_message(request, messages.ERROR, 'Could not retrieve Ballotpedia data. '
                                                      'Election could not be found.')
        return HttpResponseRedirect(reverse('election:election_list', args=()))
    # Check to see if we have polling location data related to the region(s) covered by this election
    # We request the ballot data for each polling location as a way to build up our local data
    if not positive_value_exists(state_code) and positive_value_exists(google_civic_election_id):
        state_code = election_state_code
    if positive_value_exists(is_national_election) and not positive_value_exists(state_code):
        messages.add_message(request, messages.ERROR,
                             'For National elections, a State Code is required in order to run any '
                             'Ballotpedia data preparation.')
        return HttpResponseRedirect(reverse('election:election_summary', args=(election_local_id,)))
    try:
        # First preference: polling locations explicitly flagged for bulk retrieval
        polling_location_count_query = PollingLocation.objects.all()
        polling_location_count_query = polling_location_count_query.filter(state__iexact=state_code)
        polling_location_count_query = polling_location_count_query.filter(use_for_bulk_retrieve=True)
        polling_location_count_query = polling_location_count_query.filter(polling_location_deleted=False)
        polling_location_count = polling_location_count_query.count()
        if positive_value_exists(polling_location_count):
            polling_location_query = PollingLocation.objects.all()
            polling_location_query = polling_location_query.filter(state__iexact=state_code)
            polling_location_query = polling_location_query.filter(use_for_bulk_retrieve=True)
            polling_location_query = polling_location_query.filter(polling_location_deleted=False)
            # We used to have a limit of 500 ballots to pull per election, but now retrieve all
            # Ordering by "location_name" creates a bit of (locational) random order
            polling_location_list = polling_location_query.order_by('location_name')[:import_limit]
    except Exception as e:
        status += "COULD_NOT_FIND_POLLING_LOCATION_LIST " + str(e) + " "
    if polling_location_count == 0:
        # We didn't find any polling locations marked for bulk retrieve, so just retrieve up to the import_limit
        try:
            # Only polling locations with usable geocoding (latitude) and zip data
            polling_location_count_query = PollingLocation.objects.all()
            polling_location_count_query = \
                polling_location_count_query.exclude(Q(latitude__isnull=True) | Q(latitude__exact=0.0))
            polling_location_count_query = \
                polling_location_count_query.exclude(Q(zip_long__isnull=True) | Q(zip_long__exact='0') |
                                                     Q(zip_long__exact=''))
            polling_location_count_query = polling_location_count_query.filter(state__iexact=state_code)
            polling_location_count_query = polling_location_count_query.filter(polling_location_deleted=False)
            polling_location_count = polling_location_count_query.count()
            if positive_value_exists(polling_location_count):
                polling_location_query = PollingLocation.objects.all()
                polling_location_query = \
                    polling_location_query.exclude(Q(latitude__isnull=True) | Q(latitude__exact=0.0))
                polling_location_query = \
                    polling_location_query.exclude(Q(zip_long__isnull=True) | Q(zip_long__exact='0') |
                                                   Q(zip_long__exact=''))
                polling_location_query = polling_location_query.filter(state__iexact=state_code)
                polling_location_query = polling_location_query.filter(polling_location_deleted=False)
                # Ordering by "location_name" creates a bit of (locational) random order
                polling_location_list = polling_location_query.order_by('location_name')[:import_limit]
        except PollingLocation.DoesNotExist:
            messages.add_message(request, messages.INFO,
                                 'Could not retrieve ballot data for the {election_name}. '
                                 'No polling locations exist for the state \'{state}\'. '
                                 'Data needed from VIP.'.format(
                                     election_name=election_name,
                                     state=state_code))
            return HttpResponseRedirect(reverse('election:election_summary', args=(election_local_id,)))
    if polling_location_count == 0:
        messages.add_message(request, messages.ERROR,
                             'Could not retrieve ballot data for the {election_name}. '
                             'No polling locations returned for the state \'{state}\'. (error 2)'.format(
                                 election_name=election_name,
                                 state=state_code))
        return HttpResponseRedirect(reverse('election:election_summary', args=(election_local_id,)))
    # If here, we know that we have some polling_locations to use in order to retrieve ballotpedia districts
    ballots_retrieved = 0
    ballots_not_retrieved = 0
    # Step though our set of polling locations, until we find one that contains a ballot. Some won't contain ballots
    # due to data quality issues.
    if retrieve_races or retrieve_measures or force_district_retrieve_from_ballotpedia:
        polling_locations_with_data = 0
        polling_locations_without_data = 0
        # If here we just want to retrieve the races for this election
        merged_district_list = []
        for polling_location in polling_location_list:
            one_ballot_results = retrieve_ballotpedia_district_id_list_for_polling_location(
                google_civic_election_id, polling_location=polling_location,
                force_district_retrieve_from_ballotpedia=force_district_retrieve_from_ballotpedia)
            success = False
            if one_ballot_results['success']:
                success = True
                ballotpedia_district_id_list = one_ballot_results['ballotpedia_district_id_list']
                if len(ballotpedia_district_id_list):
                    for one_ballotpedia_district_id in ballotpedia_district_id_list:
                        if one_ballotpedia_district_id not in merged_district_list:
                            # Build up a list of ballotpedia districts that we need to retrieve races for
                            merged_district_list.append(one_ballotpedia_district_id)
            if success:
                polling_locations_with_data += 1
            else:
                polling_locations_without_data += 1
        # Once we have a summary of all ballotpedia districts, we want to request all of the races or measures
        if len(merged_district_list):
            kind_of_batch = "Unknown"
            results = {}
            if retrieve_races:
                results = retrieve_ballotpedia_offices_by_district_from_api(google_civic_election_id, state_code,
                                                                            merged_district_list)
                kind_of_batch = ""
                if 'kind_of_batch' in results:
                    kind_of_batch = results['kind_of_batch']
                if not positive_value_exists(kind_of_batch):
                    kind_of_batch = CONTEST_OFFICE
                status += results['status']
            elif retrieve_measures:
                results = retrieve_ballotpedia_measures_by_district_from_api(google_civic_election_id, state_code,
                                                                             merged_district_list)
                kind_of_batch = ""
                if 'kind_of_batch' in results:
                    kind_of_batch = results['kind_of_batch']
                if not positive_value_exists(kind_of_batch):
                    kind_of_batch = MEASURE
                status += results['status']
            batch_header_id = 0
            if 'batch_saved' in results and results['batch_saved']:
                messages.add_message(request, messages.INFO,
                                     kind_of_batch +
                                     ' import batch for {google_civic_election_id} election saved. '
                                     'status: {status}'
                                     ''.format(google_civic_election_id=google_civic_election_id,
                                               status=status))
                batch_header_id = results['batch_header_id']
            elif 'batch_header_id' in results and results['batch_header_id']:
                messages.add_message(request, messages.INFO,
                                     kind_of_batch +
                                     ' import batch for {google_civic_election_id} election saved, '
                                     'batch_header_id. status: {status}'
                                     ''.format(google_civic_election_id=google_civic_election_id,
                                               status=status))
                batch_header_id = results['batch_header_id']
            else:
                messages.add_message(request, messages.ERROR, results['status'])
            if positive_value_exists(batch_header_id):
                # Go straight to the new batch
                return HttpResponseRedirect(reverse('import_export_batches:batch_action_list', args=()) +
                                            "?batch_header_id=" + str(batch_header_id) +
                                            "&kind_of_batch=" + str(kind_of_batch) +
                                            "&google_civic_election_id=" + str(google_civic_election_id))
            else:
                if retrieve_races:
                    # Go to the office listing page
                    return HttpResponseRedirect(reverse('office:office_list', args=()) +
                                                "?google_civic_election_id=" + str(google_civic_election_id))
                elif retrieve_measures:
                    # Go to the measure listing page
                    return HttpResponseRedirect(reverse('measure:measure_list', args=()) +
                                                "?google_civic_election_id=" + str(google_civic_election_id))
        messages.add_message(request, messages.INFO,
                             'Races or measures retrieved from Ballotpedia for the {election_name}. '
                             'polling_locations_with_data: {polling_locations_with_data}, '
                             'polling_locations_without_data: {polling_locations_without_data}. '
                             ''.format(
                                 polling_locations_with_data=polling_locations_with_data,
                                 # Bug fix: previously polling_locations_with_data was passed for both counters,
                                 # so the "without data" count shown to the admin was always wrong
                                 polling_locations_without_data=polling_locations_without_data,
                                 election_name=election_name))
        return HttpResponseRedirect(reverse('import_export_batches:batch_set_list', args=()) +
                                    '?kind_of_batch=IMPORT_BALLOTPEDIA_BALLOT_ITEMS' +
                                    '&google_civic_election_id=' + str(google_civic_election_id))
    else:
        # Create Batch Set for ballot items
        import_date = date.today()
        batch_set_id = 0
        batch_set_name = "Ballotpedia ballot locations for " + election_name + \
                         " (state " + str(state_code.upper()) + ")" + \
                         " - ballotpedia: " + str(ballotpedia_election_id) + \
                         " - " + str(import_date)
        # create batch_set object
        try:
            batch_set = BatchSet.objects.create(batch_set_description_text="", batch_set_name=batch_set_name,
                                                batch_set_source=BATCH_SET_SOURCE_IMPORT_BALLOTPEDIA_BALLOT_ITEMS,
                                                google_civic_election_id=google_civic_election_id,
                                                source_uri=BALLOTPEDIA_API_CONTAINS_URL, import_date=import_date)
            batch_set_id = batch_set.id
            if positive_value_exists(batch_set_id):
                status += " BATCH_SET_SAVED"
                success = True
        except Exception as e:
            # Stop trying to save rows -- break out of the for loop
            status += " EXCEPTION_BATCH_SET "
        # If here, we assume we have already retrieved races for this election, and now we want to
        # put ballot items for this location onto a ballot
        for polling_location in polling_location_list:
            one_ballot_results = retrieve_ballot_items_from_polling_location(
                google_civic_election_id, polling_location=polling_location, batch_set_id=batch_set_id,
                state_code=state_code)
            success = False
            if one_ballot_results['success']:
                success = True
            if success:
                ballots_retrieved += 1
            else:
                ballots_not_retrieved += 1
        # We used to only retrieve up to 500 locations from each state, but we don't limit now
        # # Break out of this loop, assuming we have a minimum number of ballots with contests retrieved
        # # If we don't achieve the minimum number of ballots_with_contests_retrieved, break out at the emergency level
        # emergency = (ballots_retrieved + ballots_not_retrieved) >= (3 * number_of_polling_locations_to_retrieve)
        # if ((ballots_retrieved + ballots_not_retrieved) >= number_of_polling_locations_to_retrieve and
        #         ballots_with_contests_retrieved > 20) or emergency:
        #     break
        messages.add_message(request, messages.INFO,
                             'Ballot data retrieved from Ballotpedia for the {election_name}. '
                             'ballots retrieved: {ballots_retrieved}. '
                             ''.format(
                                 ballots_retrieved=ballots_retrieved,
                                 ballots_not_retrieved=ballots_not_retrieved,
                                 election_name=election_name))
        return HttpResponseRedirect(reverse('import_export_batches:batch_set_list', args=()) +
                                    '?kind_of_batch=IMPORT_BALLOTPEDIA_BALLOT_ITEMS' +
                                    '&google_civic_election_id=' + str(google_civic_election_id))
# @login_required
# def retrieve_ballotpedia_offices_by_election_from_api_view(request):
# """
# Reach out to Ballotpedia API to retrieve offices.
# """
# # admin, partner_organization, political_data_manager, political_data_viewer, verified_volunteer
# authority_required = {'political_data_manager'}
# if not voter_has_authority(request, authority_required):
# return redirect_to_sign_in_page(request, authority_required)
#
# google_civic_election_id = convert_to_int(request.GET.get('google_civic_election_id', 0))
#
# results = retrieve_ballotpedia_offices_by_election_from_api(google_civic_election_id)
#
# kind_of_batch = ""
# if 'kind_of_batch' in results:
# kind_of_batch = results['kind_of_batch']
# if not positive_value_exists(kind_of_batch):
# kind_of_batch = CONTEST_OFFICE
#
# batch_header_id = 0
# if 'batch_saved' in results and results['batch_saved']:
# messages.add_message(request, messages.INFO, 'Import batch for {google_civic_election_id} election saved.'
# ''.format(google_civic_election_id=google_civic_election_id))
# batch_header_id = results['batch_header_id']
# elif 'batch_header_id' in results and results['batch_header_id']:
# messages.add_message(request, messages.INFO, 'Import batch for {google_civic_election_id} election saved, '
# 'batch_header_id.'
# ''.format(google_civic_election_id=google_civic_election_id))
# batch_header_id = results['batch_header_id']
# else:
# messages.add_message(request, messages.ERROR, results['status'])
#
# if positive_value_exists(batch_header_id):
# # Go straight to the new batch
# return HttpResponseRedirect(reverse('import_export_batches:batch_action_list', args=()) +
# "?batch_header_id=" + str(batch_header_id) +
# "&kind_of_batch=" + str(kind_of_batch) +
# "&google_civic_election_id=" + str(google_civic_election_id))
# else:
# # Go to the office listing page
# return HttpResponseRedirect(reverse('office:office_list', args=()) +
# "?google_civic_election_id=" + str(google_civic_election_id))
| 58.835526
| 120
| 0.657967
| 5,026
| 44,715
| 5.454835
| 0.063669
| 0.094653
| 0.056828
| 0.06281
| 0.860592
| 0.837905
| 0.821746
| 0.784031
| 0.76904
| 0.757404
| 0
| 0.002806
| 0.274762
| 44,715
| 759
| 121
| 58.913043
| 0.84261
| 0.167684
| 0
| 0.731261
| 0
| 0
| 0.14819
| 0.061218
| 0
| 0
| 0
| 0
| 0
| 1
| 0.010969
| false
| 0
| 0.084095
| 0
| 0.159049
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fb6f5ee56d757b27b41b93f9d95017a036225701
| 5,827
|
py
|
Python
|
tests/test_accept.py
|
alanjcastonguay/flask-restplus
|
a8f35823fe40b2c7385632a2ad6b35b26467402c
|
[
"BSD-3-Clause"
] | 2,885
|
2015-01-01T17:40:44.000Z
|
2022-03-31T10:10:28.000Z
|
tests/test_accept.py
|
alanjcastonguay/flask-restplus
|
a8f35823fe40b2c7385632a2ad6b35b26467402c
|
[
"BSD-3-Clause"
] | 768
|
2015-01-01T17:21:48.000Z
|
2022-03-28T09:02:45.000Z
|
tests/test_accept.py
|
alanjcastonguay/flask-restplus
|
a8f35823fe40b2c7385632a2ad6b35b26467402c
|
[
"BSD-3-Clause"
] | 676
|
2015-01-05T12:53:22.000Z
|
2022-03-22T06:03:44.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import flask_restplus as restplus
class Foo(restplus.Resource):
    """Minimal test resource: GET returns the bare string "data".

    The framework serializes the return value through whichever
    representation content negotiation selects.
    """
    def get(self):
        return "data"
class ErrorsTest(object):
    """Accept-header content negotiation behaviour of ``restplus.Api``.

    Covers: the built-in ``application/json`` default mediatype, custom
    representations registered via ``@api.representation``, quality
    (``q=``) ordering, and the 406/500 failure paths when nothing matches.
    """

    def test_accept_default_application_json(self, app, client):
        # No Accept header at all -> the default mediatype is used.
        api = restplus.Api(app)
        api.add_resource(Foo, '/test/')
        res = client.get('/test/', headers={'Accept': None})
        assert res.status_code == 200
        assert res.content_type == 'application/json'

    def test_accept_application_json_by_default(self, app, client):
        api = restplus.Api(app)
        api.add_resource(Foo, '/test/')
        res = client.get('/test/', headers=[('Accept', 'application/json')])
        assert res.status_code == 200
        assert res.content_type == 'application/json'

    def test_accept_no_default_match_acceptable(self, app, client):
        # Even with no default mediatype, an explicit JSON Accept matches.
        api = restplus.Api(app, default_mediatype=None)
        api.add_resource(Foo, '/test/')
        res = client.get('/test/', headers=[('Accept', 'application/json')])
        assert res.status_code == 200
        assert res.content_type == 'application/json'

    def test_accept_default_override_accept(self, app, client):
        # Unknown mediatype requested -> the default still wins.
        api = restplus.Api(app)
        api.add_resource(Foo, '/test/')
        res = client.get('/test/', headers=[('Accept', 'text/plain')])
        assert res.status_code == 200
        assert res.content_type == 'application/json'

    def test_accept_default_any_pick_first(self, app, client):
        # Wildcard Accept -> the default mediatype is chosen, not the
        # extra text/plain representation.
        api = restplus.Api(app)

        @api.representation('text/plain')
        def text_rep(data, status_code, headers=None):
            resp = app.make_response((str(data), status_code, headers))
            return resp

        api.add_resource(Foo, '/test/')
        res = client.get('/test/', headers=[('Accept', '*/*')])
        assert res.status_code == 200
        assert res.content_type == 'application/json'

    def test_accept_no_default_no_match_not_acceptable(self, app, client):
        api = restplus.Api(app, default_mediatype=None)
        api.add_resource(Foo, '/test/')
        res = client.get('/test/', headers=[('Accept', 'text/plain')])
        assert res.status_code == 406
        assert res.content_type == 'application/json'

    def test_accept_no_default_custom_repr_match(self, app, client):
        api = restplus.Api(app, default_mediatype=None)
        api.representations = {}

        @api.representation('text/plain')
        def text_rep(data, status_code, headers=None):
            resp = app.make_response((str(data), status_code, headers))
            return resp

        api.add_resource(Foo, '/test/')
        res = client.get('/test/', headers=[('Accept', 'text/plain')])
        assert res.status_code == 200
        assert res.content_type == 'text/plain'

    def test_accept_no_default_custom_repr_not_acceptable(self, app, client):
        api = restplus.Api(app, default_mediatype=None)
        api.representations = {}

        @api.representation('text/plain')
        def text_rep(data, status_code, headers=None):
            resp = app.make_response((str(data), status_code, headers))
            return resp

        api.add_resource(Foo, '/test/')
        res = client.get('/test/', headers=[('Accept', 'application/json')])
        assert res.status_code == 406
        assert res.content_type == 'text/plain'

    def test_accept_no_default_match_q0_not_acceptable(self, app, client):
        """
        q=0 should be considered NotAcceptable,
        but this depends on werkzeug >= 1.0 which is not yet released
        so this test is expected to fail until we depend on werkzeug >= 1.0
        """
        api = restplus.Api(app, default_mediatype=None)
        api.add_resource(Foo, '/test/')
        res = client.get('/test/', headers=[('Accept', 'application/json; q=0')])
        assert res.status_code == 406
        assert res.content_type == 'application/json'

    def test_accept_no_default_accept_highest_quality_of_two(self, app, client):
        api = restplus.Api(app, default_mediatype=None)

        @api.representation('text/plain')
        def text_rep(data, status_code, headers=None):
            resp = app.make_response((str(data), status_code, headers))
            return resp

        api.add_resource(Foo, '/test/')
        res = client.get('/test/', headers=[('Accept', 'application/json; q=0.1, text/plain; q=1.0')])
        assert res.status_code == 200
        assert res.content_type == 'text/plain'

    def test_accept_no_default_accept_highest_quality_of_three(self, app, client):
        api = restplus.Api(app, default_mediatype=None)

        @api.representation('text/html')
        @api.representation('text/plain')
        def text_rep(data, status_code, headers=None):
            resp = app.make_response((str(data), status_code, headers))
            return resp

        api.add_resource(Foo, '/test/')
        res = client.get('/test/', headers=[('Accept', 'application/json; q=0.1, text/plain; q=0.3, text/html; q=0.2')])
        assert res.status_code == 200
        assert res.content_type == 'text/plain'

    def test_accept_no_default_no_representations(self, app, client):
        api = restplus.Api(app, default_mediatype=None)
        api.representations = {}
        api.add_resource(Foo, '/test/')
        res = client.get('/test/', headers=[('Accept', 'text/plain')])
        assert res.status_code == 406
        assert res.content_type == 'text/plain'

    def test_accept_invalid_default_no_representations(self, app, client):
        # A default mediatype with no registered representation is a
        # server-side configuration error -> 500.
        api = restplus.Api(app, default_mediatype='nonexistant/mediatype')
        api.representations = {}
        api.add_resource(Foo, '/test/')
        res = client.get('/test/', headers=[('Accept', 'text/plain')])
        assert res.status_code == 500
| 37.11465
| 120
| 0.634289
| 731
| 5,827
| 4.852257
| 0.131327
| 0.063434
| 0.047646
| 0.062306
| 0.880744
| 0.873132
| 0.869749
| 0.864111
| 0.855653
| 0.843248
| 0
| 0.012661
| 0.22739
| 5,827
| 156
| 121
| 37.352564
| 0.775211
| 0.03295
| 0
| 0.75
| 0
| 0.009259
| 0.125827
| 0.003753
| 0
| 0
| 0
| 0
| 0.231481
| 1
| 0.175926
| false
| 0
| 0.018519
| 0.009259
| 0.268519
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fbbeed0f8571a8f7c1cb8909f2d22e18d5cefba6
| 404
|
py
|
Python
|
python/testData/highlighting/parametersWithAnnotationsAndDefaults.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/highlighting/parametersWithAnnotationsAndDefaults.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/highlighting/parametersWithAnnotationsAndDefaults.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
def <info descr="PY.FUNC_DEFINITION">f</info>(<info descr="PY.PARAMETER">p1</info>: <info descr="PY.ANNOTATION"><info descr="PY.BUILTIN_NAME">int</info></info>, <info descr="PY.PARAMETER">p2</info>: <info descr="PY.ANNOTATION"><info descr="PY.BUILTIN_NAME">int</info></info> = 42):
<info descr="PY.BUILTIN_NAME">print</info>(<info descr="PY.PARAMETER">p1</info>, <info descr="PY.PARAMETER">p2</info>)
| 202
| 281
| 0.69802
| 64
| 404
| 4.34375
| 0.25
| 0.323741
| 0.395683
| 0.323741
| 0.870504
| 0.791367
| 0.791367
| 0.658273
| 0.658273
| 0.658273
| 0
| 0.015707
| 0.054455
| 404
| 2
| 282
| 202
| 0.712042
| 0
| 0
| 0
| 0
| 0
| 0.338272
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.5
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 11
|
fbcdd4b3d3cd82e73d5ad1a10aba5cdf6ca59a3a
| 37
|
py
|
Python
|
bases/nice-python/fancy_python_library/fancy_python_library/fancy.py
|
cloderic/docker-monorepo
|
f8652303422cc1a82b93b7bcbe6dea3c5982777d
|
[
"WTFPL"
] | 31
|
2019-02-25T13:56:43.000Z
|
2022-01-29T14:57:01.000Z
|
bases/nice-python/fancy_python_library/fancy_python_library/fancy.py
|
cloderic/docker-monorepo
|
f8652303422cc1a82b93b7bcbe6dea3c5982777d
|
[
"WTFPL"
] | 1
|
2019-08-05T08:47:00.000Z
|
2020-08-15T13:43:56.000Z
|
bases/nice-python/fancy_python_library/fancy_python_library/fancy.py
|
cloderic/docker-monorepo
|
f8652303422cc1a82b93b7bcbe6dea3c5982777d
|
[
"WTFPL"
] | 8
|
2019-06-06T06:12:12.000Z
|
2022-02-10T09:56:28.000Z
|
def get_fancy():
    """Return the library's signature fancy phrase."""
    fancy_phrase = "oh la la"
    return fancy_phrase
| 12.333333
| 19
| 0.648649
| 7
| 37
| 3.285714
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.216216
| 37
| 2
| 20
| 18.5
| 0.793103
| 0
| 0
| 0
| 0
| 0
| 0.216216
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
8382953914ef10515683f9fb87ce6e7809b3ce9d
| 42
|
py
|
Python
|
id_generator/__init__.py
|
fishball926/id_generator
|
21ad00f38b75d4baed1bbe79543dccd221577daa
|
[
"MIT"
] | null | null | null |
id_generator/__init__.py
|
fishball926/id_generator
|
21ad00f38b75d4baed1bbe79543dccd221577daa
|
[
"MIT"
] | null | null | null |
id_generator/__init__.py
|
fishball926/id_generator
|
21ad00f38b75d4baed1bbe79543dccd221577daa
|
[
"MIT"
] | null | null | null |
from id_generator.File1 import id_generate
| 42
| 42
| 0.904762
| 7
| 42
| 5.142857
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025641
| 0.071429
| 42
| 1
| 42
| 42
| 0.897436
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
83990978e8ab2375d0a628b3989192a999bb65c9
| 740,395
|
py
|
Python
|
py/flexswitchV2.py
|
learnopx/opx-flxSdk
|
a87219669164b32b5345c407d4aecbe3f4ec6226
|
[
"Apache-2.0"
] | null | null | null |
py/flexswitchV2.py
|
learnopx/opx-flxSdk
|
a87219669164b32b5345c407d4aecbe3f4ec6226
|
[
"Apache-2.0"
] | null | null | null |
py/flexswitchV2.py
|
learnopx/opx-flxSdk
|
a87219669164b32b5345c407d4aecbe3f4ec6226
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
import requests
import json
import urllib2
from requests.packages.urllib3.exceptions import InsecureRequestWarning
# Default headers for plain JSON REST calls.
headers = {'Accept' : 'application/json', 'Content-Type' : 'application/json'}
# Headers for RFC 6902 JSON-patch requests.
# BUG FIX: the header name was misspelled 'Conent-Type', so patch requests
# were sent with a bogus header and no valid Content-Type at all.
patchheaders = {'Content-Type':'application/json-patch+json'}
#def processReturnCode (method) :
# def returnDetails (self, *args, **kwargs) :
# r = method(self, *args, **kwargs)
# if r.status_code in self.httpSuccessCodes:
# return (r.json(), None)
# else:
# ret = {}
# try:
# ret = r.json()
# except:
# print 'Did not receive Json. HTTP Status %s: Code %s ' %(r.reason, r.status_code)
# return ret, r.reason
# print 'Error from server. Error code %s, Error Message: %s' %(r.status_code, r.json()['Error'])
# return (r.json(), "Error")
# return returnDetails
class FlexSwitch( object):
httpSuccessCodes = [200, 201, 202, 204]
def __init__ (self, ip, port, user=None, passwd=None, timeout=15):
    """Build a FlexSwitch REST client.

    :param ip: management IP or hostname of the switch
    :param port: REST port (only used for the unauthenticated http URLs)
    :param user: optional username; supplying one switches all URLs to
                 https with basic auth
    :param passwd: password paired with ``user``
    :param timeout: per-request timeout in seconds (default 15)
    """
    self.ip = ip
    self.port = port
    self.timeout = timeout
    self.authenticate = False
    if user is not None:
        # Self-signed switch certificates are expected, so the insecure
        # request warning is silenced up front.
        requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
        self.authenticate = True
        self.user = user
        self.passwd = passwd
        # NOTE(review): the authenticated https URLs drop `port` and rely
        # on the default 443 — confirm this is intentional.
        self.cfgUrlBase = 'https://%s/public/v1/config/'%(ip)
        self.stateUrlBase = 'https://%s/public/v1/state/'%(ip)
        self.actionUrlBase = 'https://%s/public/v1/action/'%(ip)
    else:
        self.cfgUrlBase = 'http://%s:%s/public/v1/config/'%(ip,str(port))
        self.stateUrlBase = 'http://%s:%s/public/v1/state/'%(ip,str(port))
        self.actionUrlBase = 'http://%s:%s/public/v1/action/'%(ip,str(port))
def getObjects(self, objName, urlPath):
currentMarker = 0
nextMarker = 0
count = 100
more = True
entries = []
while more == True:
more = False
qry = '%s/%ss?CurrentMarker=%d&NextMarker=%d&Count=%d' %(urlPath, objName, currentMarker, nextMarker, count)
if self.authenticate == True:
response = requests.get(qry, timeout=self.timeout, auth=(self.user, self.passwd), varify=False)
else:
response = requests.get(qry, timeout=self.timeout)
if response.status_code in self.httpSuccessCodes:
data = response.json()
more = data['MoreExist']
currentMarker = data['NextMarker']
NextMarker = data['NextMarker']
if data['Objects'] != None:
entries.extend(data['Objects'])
else:
print 'Server returned Error for %s' %(qry)
return entries
def getObject(self, objName, obj, urlPath):
qry = '%s/%s' %(urlPath, objName)
if self.authenticate == True:
response = requests.get(qry, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
response = requests.get(qry, data=json.dumps(obj), headers=headers, timeout=self.timeout)
if response.status_code in self.httpSuccessCodes:
data = response.json()
if data['Object'] != None:
entry = (data['Object'])
else:
print 'Server returned Error for %s' %(qry)
return entry
def getObjectById(self, objName, Id, urlPath):
qry = '%s/%s/%s' %(urlPath, objName, Id)
if self.authenticate == True:
response = requests.get(qry, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
response = requests.get(qry, headers=headers, timeout=self.timeout)
if response.status_code in self.httpSuccessCodes:
data = response.json()
if data['Object'] != None:
entry = (data['Object'])
else:
print 'Server returned Error for %s' %(qry)
return entry
def getArpEntryState(self, IpAddr):
    """GET the ArpEntry state object whose key is *IpAddr*.

    :returns: the raw requests.Response
    """
    payload = json.dumps({
        'IpAddr' : IpAddr,
    })
    reqUrl = self.stateUrlBase + 'ArpEntry'
    request_args = dict(data=payload, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        request_args.update(auth=(self.user, self.passwd), verify=False)
    r = requests.get(reqUrl, **request_args)
    return r
def getArpEntryStateById(self, objectId ):
    """GET the ArpEntry state object identified by *objectId*.

    :returns: the raw requests.Response
    """
    reqUrl = self.stateUrlBase + 'ArpEntry' + "/%s" % (objectId)
    request_args = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        request_args.update(auth=(self.user, self.passwd), verify=False)
    r = requests.get(reqUrl, **request_args)
    return r
def getAllArpEntryStates(self):
    """Return every ArpEntry state object, following server pagination."""
    state_base = self.stateUrlBase
    return self.getObjects('ArpEntry', state_base)
def getPlatformMgmtDeviceState(self,
DeviceName):
obj = {
'DeviceName' : DeviceName,
}
reqUrl = self.stateUrlBase + 'PlatformMgmtDevice'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getPlatformMgmtDeviceStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'PlatformMgmtDevice'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllPlatformMgmtDeviceStates(self):
return self.getObjects('PlatformMgmtDevice', self.stateUrlBase)
def getOspfIPv4RouteState(self,
DestId,
DestType,
AddrMask):
obj = {
'DestId' : DestId,
'DestType' : DestType,
'AddrMask' : AddrMask,
}
reqUrl = self.stateUrlBase + 'OspfIPv4Route'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getOspfIPv4RouteStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'OspfIPv4Route'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllOspfIPv4RouteStates(self):
return self.getObjects('OspfIPv4Route', self.stateUrlBase)
def getOspfv2IntfState(self,
AddressLessIfIdx,
IpAddress):
obj = {
'AddressLessIfIdx' : int(AddressLessIfIdx),
'IpAddress' : IpAddress,
}
reqUrl = self.stateUrlBase + 'Ospfv2Intf'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getOspfv2IntfStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'Ospfv2Intf'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllOspfv2IntfStates(self):
return self.getObjects('Ospfv2Intf', self.stateUrlBase)
def getNdpEntryHwState(self,
IpAddr):
obj = {
'IpAddr' : IpAddr,
}
reqUrl = self.stateUrlBase + 'NdpEntryHw'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getNdpEntryHwStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'NdpEntryHw'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllNdpEntryHwStates(self):
return self.getObjects('NdpEntryHw', self.stateUrlBase)
def getPolicyExtendedCommunitySetState(self,
Name):
obj = {
'Name' : Name,
}
reqUrl = self.stateUrlBase + 'PolicyExtendedCommunitySet'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getPolicyExtendedCommunitySetStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'PolicyExtendedCommunitySet'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllPolicyExtendedCommunitySetStates(self):
return self.getObjects('PolicyExtendedCommunitySet', self.stateUrlBase)
"""
.. automethod :: executeFaultEnable(self,
:param string OwnerName : Fault owner name Fault owner name
:param string EventName : Fault event name Fault event name
:param bool Enable : Enable/Disbale control Enable/Disbale control
"""
def executeFaultEnable(self,
                       OwnerName,
                       EventName,
                       Enable):
    """POST the FaultEnable action to the switch.

    :param OwnerName: fault owner name
    :param EventName: fault event name
    :param Enable: enable/disable control (coerced to a strict bool below)
    :returns: the raw requests.Response
    """
    obj = {
        'OwnerName' : OwnerName,
        'EventName' : EventName,
        'Enable' : True if Enable else False,
    }
    reqUrl = self.actionUrlBase+'FaultEnable'
    # NOTE(review): unlike the CRUD helpers, no timeout is passed to these
    # requests — confirm whether that is intentional for action endpoints.
    if self.authenticate == True:
        r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.post(reqUrl, data=json.dumps(obj), headers=headers)
    return r
"""
.. automethod :: createPolicyStmt(self,
:param string Name : Policy Statement Name Policy Statement Name
:param PolicyAction SetActions : A set of attr/value pairs to be set associatded with this statement. A set of attr/value pairs to be set associatded with this statement.
:param string Conditions : List of conditions added to this policy statement List of conditions added to this policy statement
:param string Action : Action for this policy statement Action for this policy statement
:param string MatchConditions : Specifies whether to match all/any of the conditions of this policy statement Specifies whether to match all/any of the conditions of this policy statement
"""
def createPolicyStmt(self,
Name,
SetActions,
Conditions,
Action='deny',
MatchConditions='all'):
obj = {
'Name' : Name,
'SetActions' : SetActions,
'Conditions' : Conditions,
'Action' : Action,
'MatchConditions' : MatchConditions,
}
reqUrl = self.cfgUrlBase+'PolicyStmt'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deletePolicyStmt(self,
Name):
obj = {
'Name' : Name,
}
reqUrl = self.cfgUrlBase+'PolicyStmt'
if self.authenticate == True:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deletePolicyStmtById(self, objectId ):
    """DELETE the PolicyStmt identified by *objectId*.

    :returns: the raw requests.Response
    """
    reqUrl = self.cfgUrlBase+'PolicyStmt'+"/%s"%(objectId)
    if self.authenticate == True:
        # BUG FIX: the authenticated branch was identical to the plain one,
        # omitting the credentials that every other authenticated call
        # (e.g. getPolicyStmtById) supplies.
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
    return r
def updatePolicyStmt(self,
Name,
SetActions = None,
Conditions = None,
Action = None,
MatchConditions = None):
obj = {}
if Name != None :
obj['Name'] = Name
if SetActions != None :
obj['SetActions'] = SetActions
if Conditions != None :
obj['Conditions'] = Conditions
if Action != None :
obj['Action'] = Action
if MatchConditions != None :
obj['MatchConditions'] = MatchConditions
reqUrl = self.cfgUrlBase+'PolicyStmt'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updatePolicyStmtById(self,
objectId,
SetActions = None,
Conditions = None,
Action = None,
MatchConditions = None):
obj = {}
if SetActions != None:
obj['SetActions'] = SetActions
if Conditions != None:
obj['Conditions'] = Conditions
if Action != None:
obj['Action'] = Action
if MatchConditions != None:
obj['MatchConditions'] = MatchConditions
reqUrl = self.cfgUrlBase+'PolicyStmt'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdatePolicyStmt(self,
Name,
op,
path,
value,):
obj = {}
obj['Name'] = Name
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'PolicyStmt'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getPolicyStmt(self,
Name):
obj = {
'Name' : Name,
}
reqUrl = self.cfgUrlBase + 'PolicyStmt'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getPolicyStmtById(self, objectId ):
reqUrl = self.cfgUrlBase + 'PolicyStmt'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllPolicyStmts(self):
return self.getObjects('PolicyStmt', self.cfgUrlBase)
"""
.. automethod :: createOspfv2Intf(self,
:param uint32 AddressLessIfIdx : For the purpose of easing the instancing of addressed and addressless interfaces; this variable takes the value 0 on interfaces with IP addresses and the corresponding value of ifIndex for interfaces having no IP address. For the purpose of easing the instancing of addressed and addressless interfaces; this variable takes the value 0 on interfaces with IP addresses and the corresponding value of ifIndex for interfaces having no IP address.
:param string IpAddress : The IP address of this OSPF interface. The IP address of this OSPF interface.
:param string AreaId : A 32-bit integer uniquely identifying the area to which the interface connects. Area ID 0.0.0.0 is used for the OSPF backbone. A 32-bit integer uniquely identifying the area to which the interface connects. Area ID 0.0.0.0 is used for the OSPF backbone.
:param uint16 MetricValue : The metric of using this Type of Service on this interface. The default value of the TOS 0 metric is 10^8 / ifSpeed. The metric of using this Type of Service on this interface. The default value of the TOS 0 metric is 10^8 / ifSpeed.
:param uint16 HelloInterval : The length of time The length of time
:param string Type : The OSPF interface type. By way of a default The OSPF interface type. By way of a default
:param uint32 RtrDeadInterval : The number of seconds that a router's Hello packets have not been seen before its neighbors declare the router down. This should be some multiple of the Hello interval. This value must be the same for all routers attached to a common network. The number of seconds that a router's Hello packets have not been seen before its neighbors declare the router down. This should be some multiple of the Hello interval. This value must be the same for all routers attached to a common network.
:param uint16 RetransInterval : The number of seconds between link state advertisement retransmissions The number of seconds between link state advertisement retransmissions
:param string AdminState : Indiacates if OSPF is enabled on this interface Indiacates if OSPF is enabled on this interface
:param uint8 RtrPriority : The priority of this interface. Used in multi-access networks The priority of this interface. Used in multi-access networks
:param uint16 TransitDelay : The estimated number of seconds it takes to transmit a link state update packet over this interface. Note that the minimal value SHOULD be 1 second. The estimated number of seconds it takes to transmit a link state update packet over this interface. Note that the minimal value SHOULD be 1 second.
"""
def createOspfv2Intf(self,
AddressLessIfIdx,
IpAddress,
AreaId='0.0.0.0',
MetricValue=10,
HelloInterval=10,
Type='Broadcast',
RtrDeadInterval=40,
RetransInterval=5,
AdminState='DOWN',
RtrPriority=1,
TransitDelay=1):
obj = {
'AddressLessIfIdx' : int(AddressLessIfIdx),
'IpAddress' : IpAddress,
'AreaId' : AreaId,
'MetricValue' : int(MetricValue),
'HelloInterval' : int(HelloInterval),
'Type' : Type,
'RtrDeadInterval' : int(RtrDeadInterval),
'RetransInterval' : int(RetransInterval),
'AdminState' : AdminState,
'RtrPriority' : int(RtrPriority),
'TransitDelay' : int(TransitDelay),
}
reqUrl = self.cfgUrlBase+'Ospfv2Intf'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteOspfv2Intf(self,
AddressLessIfIdx,
IpAddress):
obj = {
'AddressLessIfIdx' : AddressLessIfIdx,
'IpAddress' : IpAddress,
}
reqUrl = self.cfgUrlBase+'Ospfv2Intf'
if self.authenticate == True:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteOspfv2IntfById(self, objectId ):
    """DELETE the Ospfv2Intf identified by *objectId*.

    :returns: the raw requests.Response
    """
    reqUrl = self.cfgUrlBase+'Ospfv2Intf'+"/%s"%(objectId)
    if self.authenticate == True:
        # BUG FIX: the authenticated branch was identical to the plain one,
        # omitting the credentials that every other authenticated call
        # (e.g. getOspfv2IntfById) supplies.
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
    return r
def updateOspfv2Intf(self,
AddressLessIfIdx,
IpAddress,
AreaId = None,
MetricValue = None,
HelloInterval = None,
Type = None,
RtrDeadInterval = None,
RetransInterval = None,
AdminState = None,
RtrPriority = None,
TransitDelay = None):
obj = {}
if AddressLessIfIdx != None :
obj['AddressLessIfIdx'] = int(AddressLessIfIdx)
if IpAddress != None :
obj['IpAddress'] = IpAddress
if AreaId != None :
obj['AreaId'] = AreaId
if MetricValue != None :
obj['MetricValue'] = int(MetricValue)
if HelloInterval != None :
obj['HelloInterval'] = int(HelloInterval)
if Type != None :
obj['Type'] = Type
if RtrDeadInterval != None :
obj['RtrDeadInterval'] = int(RtrDeadInterval)
if RetransInterval != None :
obj['RetransInterval'] = int(RetransInterval)
if AdminState != None :
obj['AdminState'] = AdminState
if RtrPriority != None :
obj['RtrPriority'] = int(RtrPriority)
if TransitDelay != None :
obj['TransitDelay'] = int(TransitDelay)
reqUrl = self.cfgUrlBase+'Ospfv2Intf'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateOspfv2IntfById(self,
                         objectId,
                         AreaId = None,
                         MetricValue = None,
                         HelloInterval = None,
                         Type = None,
                         RtrDeadInterval = None,
                         RetransInterval = None,
                         AdminState = None,
                         RtrPriority = None,
                         TransitDelay = None):
    """PATCH the Ospfv2Intf identified by *objectId*.

    Only fields passed as non-None are included in the request body.

    CONSISTENCY FIX: numeric fields are now cast with int(), matching
    createOspfv2Intf/updateOspfv2Intf; previously this by-id variant sent
    values verbatim, so string-typed input produced a different payload
    than the by-key update path.

    :returns: the raw requests.Response
    """
    obj = {}
    if AreaId != None:
        obj['AreaId'] = AreaId
    if MetricValue != None:
        obj['MetricValue'] = int(MetricValue)
    if HelloInterval != None:
        obj['HelloInterval'] = int(HelloInterval)
    if Type != None:
        obj['Type'] = Type
    if RtrDeadInterval != None:
        obj['RtrDeadInterval'] = int(RtrDeadInterval)
    if RetransInterval != None:
        obj['RetransInterval'] = int(RetransInterval)
    if AdminState != None:
        obj['AdminState'] = AdminState
    if RtrPriority != None:
        obj['RtrPriority'] = int(RtrPriority)
    if TransitDelay != None:
        obj['TransitDelay'] = int(TransitDelay)
    reqUrl = self.cfgUrlBase+'Ospfv2Intf'+"/%s"%(objectId)
    if self.authenticate == True:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
    return r
def patchUpdateOspfv2Intf(self,
AddressLessIfIdx,
IpAddress,
op,
path,
value,):
obj = {}
obj['AddressLessIfIdx'] = AddressLessIfIdx
obj['IpAddress'] = IpAddress
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'Ospfv2Intf'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getOspfv2Intf(self,
AddressLessIfIdx,
IpAddress):
obj = {
'AddressLessIfIdx' : int(AddressLessIfIdx),
'IpAddress' : IpAddress,
}
reqUrl = self.cfgUrlBase + 'Ospfv2Intf'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getOspfv2IntfById(self, objectId ):
reqUrl = self.cfgUrlBase + 'Ospfv2Intf'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllOspfv2Intfs(self):
return self.getObjects('Ospfv2Intf', self.cfgUrlBase)
"""
.. automethod :: createQsfpChannel(self,
:param int32 ChannelNum : Qsfp Channel Number Qsfp Channel Number
:param int32 QsfpId : Qsfp Id Qsfp Id
:param float64 HigherAlarmRXPower : Higher Alarm Rx power Threshold for TCA Higher Alarm Rx power Threshold for TCA
:param float64 HigherAlarmTXPower : Higher Alarm Rx power for TCA Higher Alarm Rx power for TCA
:param float64 HigherAlarmTXBias : Higher Alarm Tx Current Bias for TCA Higher Alarm Tx Current Bias for TCA
:param float64 HigherWarningRXPower : Higher Warning Rx power Threshold for TCA Higher Warning Rx power Threshold for TCA
:param float64 HigherWarningTXPower : Higher Warning Rx power for TCA Higher Warning Rx power for TCA
:param float64 HigherWarningTXBias : Higher Warning Tx Current Bias for TCA Higher Warning Tx Current Bias for TCA
:param float64 LowerAlarmRXPower : Lower Alarm Rx power Threshold for TCA Lower Alarm Rx power Threshold for TCA
:param float64 LowerAlarmTXPower : Lower Alarm Rx power for TCA Lower Alarm Rx power for TCA
:param float64 LowerAlarmTXBias : Lower Alarm Tx Current Bias for TCA Lower Alarm Tx Current Bias for TCA
:param float64 LowerWarningRXPower : Lower Warning Rx power Threshold for TCA Lower Warning Rx power Threshold for TCA
:param float64 LowerWarningTXPower : Lower Warning Rx power for TCA Lower Warning Rx power for TCA
:param float64 LowerWarningTXBias : Lower Warning Tx Current Bias for TCA Lower Warning Tx Current Bias for TCA
:param string PMClassBAdminState : PM Class-B Admin State PM Class-B Admin State
:param string PMClassCAdminState : PM Class-C Admin State PM Class-C Admin State
:param string PMClassAAdminState : PM Class-A Admin State PM Class-A Admin State
:param string AdminState : Enable/Disable Enable/Disable
"""
def createQsfpChannel(self,
                      ChannelNum,
                      QsfpId,
                      HigherAlarmRXPower,
                      HigherAlarmTXPower,
                      HigherAlarmTXBias,
                      HigherWarningRXPower,
                      HigherWarningTXPower,
                      HigherWarningTXBias,
                      LowerAlarmRXPower,
                      LowerAlarmTXPower,
                      LowerAlarmTXBias,
                      LowerWarningRXPower,
                      LowerWarningTXPower,
                      LowerWarningTXBias,
                      PMClassBAdminState='Disable',
                      PMClassCAdminState='Disable',
                      PMClassAAdminState='Disable',
                      AdminState='Disable'):
    """Create a QsfpChannel object (HTTP POST to <cfgUrlBase>QsfpChannel).

    ChannelNum and QsfpId are coerced to int; TCA threshold values and
    admin-state strings are forwarded as given.

    :returns: the ``requests.Response`` from the POST call
    """
    obj = {
        'ChannelNum' : int(ChannelNum),
        'QsfpId' : int(QsfpId),
        'HigherAlarmRXPower' : HigherAlarmRXPower,
        'HigherAlarmTXPower' : HigherAlarmTXPower,
        'HigherAlarmTXBias' : HigherAlarmTXBias,
        'HigherWarningRXPower' : HigherWarningRXPower,
        'HigherWarningTXPower' : HigherWarningTXPower,
        'HigherWarningTXBias' : HigherWarningTXBias,
        'LowerAlarmRXPower' : LowerAlarmRXPower,
        'LowerAlarmTXPower' : LowerAlarmTXPower,
        'LowerAlarmTXBias' : LowerAlarmTXBias,
        'LowerWarningRXPower' : LowerWarningRXPower,
        'LowerWarningTXPower' : LowerWarningTXPower,
        'LowerWarningTXBias' : LowerWarningTXBias,
        'PMClassBAdminState' : PMClassBAdminState,
        'PMClassCAdminState' : PMClassCAdminState,
        'PMClassAAdminState' : PMClassAAdminState,
        'AdminState' : AdminState,
    }
    reqUrl = self.cfgUrlBase + 'QsfpChannel'
    if self.authenticate:  # PEP 8: no explicit `== True` comparison
        # NOTE(review): verify=False disables TLS certificate checking.
        r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
    return r
def deleteQsfpChannel(self,
                      ChannelNum,
                      QsfpId):
    """Delete the QsfpChannel identified by its key fields (HTTP DELETE).

    :returns: the ``requests.Response`` from the DELETE call
    """
    obj = {
        # Coerce keys to int for consistency with createQsfpChannel and
        # getQsfpChannel, which already send integer key values.
        'ChannelNum' : int(ChannelNum),
        'QsfpId' : int(QsfpId),
    }
    reqUrl = self.cfgUrlBase + 'QsfpChannel'
    if self.authenticate:
        r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
    return r
def deleteQsfpChannelById(self, objectId):
    """Delete the QsfpChannel with the given server-assigned object id.

    :returns: the ``requests.Response`` from the DELETE call
    """
    reqUrl = self.cfgUrlBase + 'QsfpChannel' + "/%s" % (objectId)
    if self.authenticate:
        # Bug fix: both branches were identical here — the authenticated
        # branch sent no credentials, unlike every other *ById method
        # (e.g. getQsfpChannelById) in this client.
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updateQsfpChannel(self,
                      ChannelNum,
                      QsfpId,
                      HigherAlarmRXPower = None,
                      HigherAlarmTXPower = None,
                      HigherAlarmTXBias = None,
                      HigherWarningRXPower = None,
                      HigherWarningTXPower = None,
                      HigherWarningTXBias = None,
                      LowerAlarmRXPower = None,
                      LowerAlarmTXPower = None,
                      LowerAlarmTXBias = None,
                      LowerWarningRXPower = None,
                      LowerWarningTXPower = None,
                      LowerWarningTXBias = None,
                      PMClassBAdminState = None,
                      PMClassCAdminState = None,
                      PMClassAAdminState = None,
                      AdminState = None):
    """Update the keyed QsfpChannel; only fields that are not None are sent.

    Keys are coerced to int; the request is an HTTP PATCH with a JSON body.

    :returns: the ``requests.Response`` from the PATCH call
    """
    # `is not None` (not `!= None`) so objects overriding __eq__ can't
    # change which fields are included.
    obj = {}
    if ChannelNum is not None:
        obj['ChannelNum'] = int(ChannelNum)
    if QsfpId is not None:
        obj['QsfpId'] = int(QsfpId)
    if HigherAlarmRXPower is not None:
        obj['HigherAlarmRXPower'] = HigherAlarmRXPower
    if HigherAlarmTXPower is not None:
        obj['HigherAlarmTXPower'] = HigherAlarmTXPower
    if HigherAlarmTXBias is not None:
        obj['HigherAlarmTXBias'] = HigherAlarmTXBias
    if HigherWarningRXPower is not None:
        obj['HigherWarningRXPower'] = HigherWarningRXPower
    if HigherWarningTXPower is not None:
        obj['HigherWarningTXPower'] = HigherWarningTXPower
    if HigherWarningTXBias is not None:
        obj['HigherWarningTXBias'] = HigherWarningTXBias
    if LowerAlarmRXPower is not None:
        obj['LowerAlarmRXPower'] = LowerAlarmRXPower
    if LowerAlarmTXPower is not None:
        obj['LowerAlarmTXPower'] = LowerAlarmTXPower
    if LowerAlarmTXBias is not None:
        obj['LowerAlarmTXBias'] = LowerAlarmTXBias
    if LowerWarningRXPower is not None:
        obj['LowerWarningRXPower'] = LowerWarningRXPower
    if LowerWarningTXPower is not None:
        obj['LowerWarningTXPower'] = LowerWarningTXPower
    if LowerWarningTXBias is not None:
        obj['LowerWarningTXBias'] = LowerWarningTXBias
    if PMClassBAdminState is not None:
        obj['PMClassBAdminState'] = PMClassBAdminState
    if PMClassCAdminState is not None:
        obj['PMClassCAdminState'] = PMClassCAdminState
    if PMClassAAdminState is not None:
        obj['PMClassAAdminState'] = PMClassAAdminState
    if AdminState is not None:
        obj['AdminState'] = AdminState
    reqUrl = self.cfgUrlBase + 'QsfpChannel'
    if self.authenticate:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
    return r
def updateQsfpChannelById(self,
                          objectId,
                          HigherAlarmRXPower = None,
                          HigherAlarmTXPower = None,
                          HigherAlarmTXBias = None,
                          HigherWarningRXPower = None,
                          HigherWarningTXPower = None,
                          HigherWarningTXBias = None,
                          LowerAlarmRXPower = None,
                          LowerAlarmTXPower = None,
                          LowerAlarmTXBias = None,
                          LowerWarningRXPower = None,
                          LowerWarningTXPower = None,
                          LowerWarningTXBias = None,
                          PMClassBAdminState = None,
                          PMClassCAdminState = None,
                          PMClassAAdminState = None,
                          AdminState = None):
    """Update the QsfpChannel addressed by object id; only non-None
    fields are included in the PATCH body.

    :returns: the ``requests.Response`` from the PATCH call
    """
    obj = {}
    if HigherAlarmRXPower is not None:
        obj['HigherAlarmRXPower'] = HigherAlarmRXPower
    if HigherAlarmTXPower is not None:
        obj['HigherAlarmTXPower'] = HigherAlarmTXPower
    if HigherAlarmTXBias is not None:
        obj['HigherAlarmTXBias'] = HigherAlarmTXBias
    if HigherWarningRXPower is not None:
        obj['HigherWarningRXPower'] = HigherWarningRXPower
    if HigherWarningTXPower is not None:
        obj['HigherWarningTXPower'] = HigherWarningTXPower
    if HigherWarningTXBias is not None:
        obj['HigherWarningTXBias'] = HigherWarningTXBias
    if LowerAlarmRXPower is not None:
        obj['LowerAlarmRXPower'] = LowerAlarmRXPower
    if LowerAlarmTXPower is not None:
        obj['LowerAlarmTXPower'] = LowerAlarmTXPower
    if LowerAlarmTXBias is not None:
        obj['LowerAlarmTXBias'] = LowerAlarmTXBias
    if LowerWarningRXPower is not None:
        obj['LowerWarningRXPower'] = LowerWarningRXPower
    if LowerWarningTXPower is not None:
        obj['LowerWarningTXPower'] = LowerWarningTXPower
    if LowerWarningTXBias is not None:
        obj['LowerWarningTXBias'] = LowerWarningTXBias
    if PMClassBAdminState is not None:
        obj['PMClassBAdminState'] = PMClassBAdminState
    if PMClassCAdminState is not None:
        obj['PMClassCAdminState'] = PMClassCAdminState
    if PMClassAAdminState is not None:
        obj['PMClassAAdminState'] = PMClassAAdminState
    if AdminState is not None:
        obj['AdminState'] = AdminState
    reqUrl = self.cfgUrlBase + 'QsfpChannel' + "/%s" % (objectId)
    if self.authenticate:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
    return r
def patchUpdateQsfpChannel(self,
                           ChannelNum,
                           QsfpId,
                           op,
                           path,
                           value):
    """Apply a single JSON-patch-style operation to the keyed QsfpChannel.

    :param op: patch operation name (e.g. 'add', 'remove', 'replace')
    :param path: field path the operation targets
    :param value: value for the operation
    :returns: the ``requests.Response`` from the PATCH call
    """
    obj = {}
    obj['ChannelNum'] = ChannelNum
    obj['QsfpId'] = QsfpId
    obj['patch'] = [{'op': op, 'path': path, 'value': value}]
    reqUrl = self.cfgUrlBase + 'QsfpChannel'
    # patchheaders (not headers) advertises the JSON-patch content type.
    if self.authenticate:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
    return r
def getQsfpChannel(self,
                   ChannelNum,
                   QsfpId):
    """Fetch the QsfpChannel identified by its key fields.

    Sends the keys as a JSON body on a GET request, matching the
    server's keyed-GET convention used throughout this client.

    :returns: the ``requests.Response`` from the GET call
    """
    obj = {
        'ChannelNum' : int(ChannelNum),
        'QsfpId' : int(QsfpId),
    }
    reqUrl = self.cfgUrlBase + 'QsfpChannel'
    if self.authenticate:
        r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
    return r
def getQsfpChannelById(self, objectId):
    """Fetch the QsfpChannel with the given server-assigned object id.

    :returns: the ``requests.Response`` from the GET call
    """
    reqUrl = self.cfgUrlBase + 'QsfpChannel' + "/%s" % (objectId)
    if self.authenticate:
        r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def getAllQsfpChannels(self):
    """Return every QsfpChannel configuration object via the generic getter."""
    baseUrl = self.cfgUrlBase
    return self.getObjects('QsfpChannel', baseUrl)
def updateDHCPv6RelayGlobal(self,
                            Vrf,
                            HopCountLimit = None,
                            Enable = None):
    """Update the DHCPv6RelayGlobal keyed by Vrf; only non-None fields
    are sent. HopCountLimit is coerced to int, Enable to bool.

    :returns: the ``requests.Response`` from the PATCH call
    """
    obj = {}
    if Vrf is not None:
        obj['Vrf'] = Vrf
    if HopCountLimit is not None:
        obj['HopCountLimit'] = int(HopCountLimit)
    if Enable is not None:
        obj['Enable'] = bool(Enable)
    reqUrl = self.cfgUrlBase + 'DHCPv6RelayGlobal'
    if self.authenticate:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
    return r
def updateDHCPv6RelayGlobalById(self,
                                objectId,
                                HopCountLimit = None,
                                Enable = None):
    """Update the DHCPv6RelayGlobal addressed by object id; only
    non-None fields are included in the PATCH body.

    :returns: the ``requests.Response`` from the PATCH call
    """
    obj = {}
    if HopCountLimit is not None:
        # Coerce for consistency with updateDHCPv6RelayGlobal, which
        # sends int(HopCountLimit) and a bool Enable.
        obj['HopCountLimit'] = int(HopCountLimit)
    if Enable is not None:
        obj['Enable'] = bool(Enable)
    reqUrl = self.cfgUrlBase + 'DHCPv6RelayGlobal' + "/%s" % (objectId)
    if self.authenticate:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
    return r
def patchUpdateDHCPv6RelayGlobal(self,
                                 Vrf,
                                 op,
                                 path,
                                 value):
    """Apply a single JSON-patch-style operation to the DHCPv6RelayGlobal
    keyed by Vrf.

    :returns: the ``requests.Response`` from the PATCH call
    """
    obj = {}
    obj['Vrf'] = Vrf
    obj['patch'] = [{'op': op, 'path': path, 'value': value}]
    reqUrl = self.cfgUrlBase + 'DHCPv6RelayGlobal'
    if self.authenticate:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
    return r
def getDHCPv6RelayGlobal(self,
                         Vrf):
    """Fetch the DHCPv6RelayGlobal keyed by Vrf (GET with JSON key body).

    :returns: the ``requests.Response`` from the GET call
    """
    obj = {
        'Vrf' : Vrf,
    }
    reqUrl = self.cfgUrlBase + 'DHCPv6RelayGlobal'
    if self.authenticate:
        r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
    return r
def getDHCPv6RelayGlobalById(self, objectId):
    """Fetch the DHCPv6RelayGlobal with the given object id.

    :returns: the ``requests.Response`` from the GET call
    """
    reqUrl = self.cfgUrlBase + 'DHCPv6RelayGlobal' + "/%s" % (objectId)
    if self.authenticate:
        r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def getAllDHCPv6RelayGlobals(self):
    """Return every DHCPv6RelayGlobal configuration object."""
    baseUrl = self.cfgUrlBase
    return self.getObjects('DHCPv6RelayGlobal', baseUrl)
"""
.. automethod :: createPowerConverterSensor(self,
:param string Name : Power Converter Sensor Name
:param float64 HigherAlarmThreshold : Higher Alarm Threshold for TCA
:param float64 HigherWarningThreshold : Higher Warning Threshold for TCA
:param float64 LowerWarningThreshold : Lower Warning Threshold for TCA
:param float64 LowerAlarmThreshold : Lower Alarm Threshold for TCA
:param string PMClassCAdminState : PM Class-C Admin State
:param string PMClassAAdminState : PM Class-A Admin State
:param string AdminState : Enable/Disable
:param string PMClassBAdminState : PM Class-B Admin State
"""
def createPowerConverterSensor(self,
                               Name,
                               HigherAlarmThreshold,
                               HigherWarningThreshold,
                               LowerWarningThreshold,
                               LowerAlarmThreshold,
                               PMClassCAdminState='Enable',
                               PMClassAAdminState='Enable',
                               AdminState='Enable',
                               PMClassBAdminState='Enable'):
    """Create a PowerConverterSensor object (HTTP POST).

    :returns: the ``requests.Response`` from the POST call
    """
    obj = {
        'Name' : Name,
        'HigherAlarmThreshold' : HigherAlarmThreshold,
        'HigherWarningThreshold' : HigherWarningThreshold,
        'LowerWarningThreshold' : LowerWarningThreshold,
        'LowerAlarmThreshold' : LowerAlarmThreshold,
        'PMClassCAdminState' : PMClassCAdminState,
        'PMClassAAdminState' : PMClassAAdminState,
        'AdminState' : AdminState,
        'PMClassBAdminState' : PMClassBAdminState,
    }
    reqUrl = self.cfgUrlBase + 'PowerConverterSensor'
    if self.authenticate:
        r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
    return r
def deletePowerConverterSensor(self,
                               Name):
    """Delete the PowerConverterSensor identified by Name (HTTP DELETE).

    :returns: the ``requests.Response`` from the DELETE call
    """
    obj = {
        'Name' : Name,
    }
    reqUrl = self.cfgUrlBase + 'PowerConverterSensor'
    if self.authenticate:
        r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
    return r
def deletePowerConverterSensorById(self, objectId):
    """Delete the PowerConverterSensor with the given object id.

    :returns: the ``requests.Response`` from the DELETE call
    """
    reqUrl = self.cfgUrlBase + 'PowerConverterSensor' + "/%s" % (objectId)
    if self.authenticate:
        # Bug fix: the authenticated branch previously sent no
        # credentials (it was identical to the unauthenticated branch).
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updatePowerConverterSensor(self,
                               Name,
                               HigherAlarmThreshold = None,
                               HigherWarningThreshold = None,
                               LowerWarningThreshold = None,
                               LowerAlarmThreshold = None,
                               PMClassCAdminState = None,
                               PMClassAAdminState = None,
                               AdminState = None,
                               PMClassBAdminState = None):
    """Update the PowerConverterSensor keyed by Name; only non-None
    fields are sent (HTTP PATCH).

    :returns: the ``requests.Response`` from the PATCH call
    """
    obj = {}
    if Name is not None:
        obj['Name'] = Name
    if HigherAlarmThreshold is not None:
        obj['HigherAlarmThreshold'] = HigherAlarmThreshold
    if HigherWarningThreshold is not None:
        obj['HigherWarningThreshold'] = HigherWarningThreshold
    if LowerWarningThreshold is not None:
        obj['LowerWarningThreshold'] = LowerWarningThreshold
    if LowerAlarmThreshold is not None:
        obj['LowerAlarmThreshold'] = LowerAlarmThreshold
    if PMClassCAdminState is not None:
        obj['PMClassCAdminState'] = PMClassCAdminState
    if PMClassAAdminState is not None:
        obj['PMClassAAdminState'] = PMClassAAdminState
    if AdminState is not None:
        obj['AdminState'] = AdminState
    if PMClassBAdminState is not None:
        obj['PMClassBAdminState'] = PMClassBAdminState
    reqUrl = self.cfgUrlBase + 'PowerConverterSensor'
    if self.authenticate:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
    return r
def updatePowerConverterSensorById(self,
                                   objectId,
                                   HigherAlarmThreshold = None,
                                   HigherWarningThreshold = None,
                                   LowerWarningThreshold = None,
                                   LowerAlarmThreshold = None,
                                   PMClassCAdminState = None,
                                   PMClassAAdminState = None,
                                   AdminState = None,
                                   PMClassBAdminState = None):
    """Update the PowerConverterSensor addressed by object id; only
    non-None fields are included in the PATCH body.

    :returns: the ``requests.Response`` from the PATCH call
    """
    obj = {}
    if HigherAlarmThreshold is not None:
        obj['HigherAlarmThreshold'] = HigherAlarmThreshold
    if HigherWarningThreshold is not None:
        obj['HigherWarningThreshold'] = HigherWarningThreshold
    if LowerWarningThreshold is not None:
        obj['LowerWarningThreshold'] = LowerWarningThreshold
    if LowerAlarmThreshold is not None:
        obj['LowerAlarmThreshold'] = LowerAlarmThreshold
    if PMClassCAdminState is not None:
        obj['PMClassCAdminState'] = PMClassCAdminState
    if PMClassAAdminState is not None:
        obj['PMClassAAdminState'] = PMClassAAdminState
    if AdminState is not None:
        obj['AdminState'] = AdminState
    if PMClassBAdminState is not None:
        obj['PMClassBAdminState'] = PMClassBAdminState
    reqUrl = self.cfgUrlBase + 'PowerConverterSensor' + "/%s" % (objectId)
    if self.authenticate:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
    return r
def patchUpdatePowerConverterSensor(self,
                                    Name,
                                    op,
                                    path,
                                    value):
    """Apply a single JSON-patch-style operation to the
    PowerConverterSensor keyed by Name.

    :returns: the ``requests.Response`` from the PATCH call
    """
    obj = {}
    obj['Name'] = Name
    obj['patch'] = [{'op': op, 'path': path, 'value': value}]
    reqUrl = self.cfgUrlBase + 'PowerConverterSensor'
    if self.authenticate:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
    return r
def getPowerConverterSensor(self,
                            Name):
    """Fetch the PowerConverterSensor keyed by Name (GET with JSON key body).

    :returns: the ``requests.Response`` from the GET call
    """
    obj = {
        'Name' : Name,
    }
    reqUrl = self.cfgUrlBase + 'PowerConverterSensor'
    if self.authenticate:
        r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
    return r
def getPowerConverterSensorById(self, objectId):
    """Fetch the PowerConverterSensor with the given object id.

    :returns: the ``requests.Response`` from the GET call
    """
    reqUrl = self.cfgUrlBase + 'PowerConverterSensor' + "/%s" % (objectId)
    if self.authenticate:
        r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def getAllPowerConverterSensors(self):
    """Return every PowerConverterSensor configuration object."""
    baseUrl = self.cfgUrlBase
    return self.getObjects('PowerConverterSensor', baseUrl)
"""
.. automethod :: createVlan(self,
:param int32 VlanId : 802.1Q tag/Vlan ID for the vlan being provisioned
:param string IntfList : List of interface names or ifindex values to be added as tagged members of the vlan
:param string UntagIntfList : List of interface names or ifindex values to be added as untagged members of the vlan
:param string Description : Description of the vlan interface
:param string AutoState : Auto state of this vlan interface
:param string AdminState : Administrative state of this vlan interface
"""
def createVlan(self,
               VlanId,
               IntfList,
               UntagIntfList,
               Description='none',
               AutoState='UP',
               AdminState='UP'):
    """Create a Vlan object (HTTP POST). VlanId is coerced to int.

    :returns: the ``requests.Response`` from the POST call
    """
    obj = {
        'VlanId' : int(VlanId),
        'IntfList' : IntfList,
        'UntagIntfList' : UntagIntfList,
        'Description' : Description,
        'AutoState' : AutoState,
        'AdminState' : AdminState,
    }
    reqUrl = self.cfgUrlBase + 'Vlan'
    if self.authenticate:
        r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
    return r
def deleteVlan(self,
               VlanId):
    """Delete the Vlan identified by VlanId (HTTP DELETE).

    :returns: the ``requests.Response`` from the DELETE call
    """
    obj = {
        # Coerce to int for consistency with createVlan/getVlan, which
        # already send an integer VlanId.
        'VlanId' : int(VlanId),
    }
    reqUrl = self.cfgUrlBase + 'Vlan'
    if self.authenticate:
        r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
    return r
def deleteVlanById(self, objectId):
    """Delete the Vlan with the given server-assigned object id.

    :returns: the ``requests.Response`` from the DELETE call
    """
    reqUrl = self.cfgUrlBase + 'Vlan' + "/%s" % (objectId)
    if self.authenticate:
        # Bug fix: the authenticated branch previously sent no
        # credentials (it was identical to the unauthenticated branch).
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updateVlan(self,
               VlanId,
               IntfList = None,
               UntagIntfList = None,
               Description = None,
               AutoState = None,
               AdminState = None):
    """Update the Vlan keyed by VlanId; only non-None fields are sent.

    :returns: the ``requests.Response`` from the PATCH call
    """
    obj = {}
    if VlanId is not None:
        obj['VlanId'] = int(VlanId)
    if IntfList is not None:
        obj['IntfList'] = IntfList
    if UntagIntfList is not None:
        obj['UntagIntfList'] = UntagIntfList
    if Description is not None:
        obj['Description'] = Description
    if AutoState is not None:
        obj['AutoState'] = AutoState
    if AdminState is not None:
        obj['AdminState'] = AdminState
    reqUrl = self.cfgUrlBase + 'Vlan'
    if self.authenticate:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
    return r
def updateVlanById(self,
                   objectId,
                   IntfList = None,
                   UntagIntfList = None,
                   Description = None,
                   AutoState = None,
                   AdminState = None):
    """Update the Vlan addressed by object id; only non-None fields are
    included in the PATCH body.

    :returns: the ``requests.Response`` from the PATCH call
    """
    obj = {}
    if IntfList is not None:
        obj['IntfList'] = IntfList
    if UntagIntfList is not None:
        obj['UntagIntfList'] = UntagIntfList
    if Description is not None:
        obj['Description'] = Description
    if AutoState is not None:
        obj['AutoState'] = AutoState
    if AdminState is not None:
        obj['AdminState'] = AdminState
    reqUrl = self.cfgUrlBase + 'Vlan' + "/%s" % (objectId)
    if self.authenticate:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
    return r
def patchUpdateVlan(self,
                    VlanId,
                    op,
                    path,
                    value):
    """Apply a single JSON-patch-style operation to the Vlan keyed by VlanId.

    :returns: the ``requests.Response`` from the PATCH call
    """
    obj = {}
    obj['VlanId'] = VlanId
    obj['patch'] = [{'op': op, 'path': path, 'value': value}]
    reqUrl = self.cfgUrlBase + 'Vlan'
    if self.authenticate:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
    return r
def getVlan(self,
            VlanId):
    """Fetch the Vlan keyed by VlanId (GET with JSON key body).

    :returns: the ``requests.Response`` from the GET call
    """
    obj = {
        'VlanId' : int(VlanId),
    }
    reqUrl = self.cfgUrlBase + 'Vlan'
    if self.authenticate:
        r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
    return r
def getVlanById(self, objectId):
    """Fetch the Vlan with the given server-assigned object id.

    :returns: the ``requests.Response`` from the GET call
    """
    reqUrl = self.cfgUrlBase + 'Vlan' + "/%s" % (objectId)
    if self.authenticate:
        r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def getAllVlans(self):
    """Return every Vlan configuration object via the generic getter."""
    baseUrl = self.cfgUrlBase
    return self.getObjects('Vlan', baseUrl)
"""
.. automethod :: createDWDMModuleNwIntf(self,
:param uint8 NwIntfId : DWDM Module network interface identifier
:param uint8 ModuleId : DWDM Module identifier
:param uint8 ClntIntfIdToTributary0Map : Client interface ID to map to network interface tributary 0
:param uint8 ClntIntfIdToTributary1Map : Client interface ID to map to network interface tributary 1
:param bool EnableRxPRBSChecker : Enable RX PRBS checker
:param float64 TxPulseShapeFltrRollOff : TX pulse shape filter roll off factor
:param float64 TxPower : Transmit output power for this network interface in dBm
:param bool RxPRBSInvertPattern : Check against inverted PRBS polynomial pattern
:param float64 TxPowerRampdBmPerSec : Rate of change of tx power on this network interface
:param bool EnableTxPRBS : Enable TX PRBS generation on this network interface
:param bool TxPRBSInvertPattern : Generate inverted PRBS polynomial pattern
:param string AdminState : Administrative state of this network interface
:param uint8 ChannelNumber : TX channel number to use for this network interface
:param string FECMode : DWDM Module network interface FEC mode
:param string ModulationFmt : Modulation format to use for this network interface
:param string TxPulseShapeFltrType : TX pulse shaping filter type
:param string RxPRBSPattern : PRBS pattern to use for checker
:param string TxPRBSPattern : Pattern to use for TX PRBS generation
:param bool DiffEncoding : Control to enable/disable DWDM Module network interface encoding type
"""
def createDWDMModuleNwIntf(self,
                           NwIntfId,
                           ModuleId,
                           ClntIntfIdToTributary0Map,
                           ClntIntfIdToTributary1Map,
                           EnableRxPRBSChecker=False,
                           TxPulseShapeFltrRollOff='0.301',
                           TxPower='0',
                           RxPRBSInvertPattern=True,
                           TxPowerRampdBmPerSec='1',
                           EnableTxPRBS=False,
                           TxPRBSInvertPattern=True,
                           AdminState='UP',
                           ChannelNumber=48,
                           FECMode='15%SDFEC',
                           ModulationFmt='16QAM',
                           TxPulseShapeFltrType='RootRaisedCos',
                           RxPRBSPattern='2^31',
                           TxPRBSPattern='2^31',
                           DiffEncoding=True):
    """Create a DWDMModuleNwIntf object (HTTP POST).

    Integer identifiers are coerced with int(); flag arguments are
    normalised with bool() (idiomatic form of `True if x else False`).

    :returns: the ``requests.Response`` from the POST call
    """
    obj = {
        'NwIntfId' : int(NwIntfId),
        'ModuleId' : int(ModuleId),
        'ClntIntfIdToTributary0Map' : int(ClntIntfIdToTributary0Map),
        'ClntIntfIdToTributary1Map' : int(ClntIntfIdToTributary1Map),
        'EnableRxPRBSChecker' : bool(EnableRxPRBSChecker),
        'TxPulseShapeFltrRollOff' : TxPulseShapeFltrRollOff,
        'TxPower' : TxPower,
        'RxPRBSInvertPattern' : bool(RxPRBSInvertPattern),
        'TxPowerRampdBmPerSec' : TxPowerRampdBmPerSec,
        'EnableTxPRBS' : bool(EnableTxPRBS),
        'TxPRBSInvertPattern' : bool(TxPRBSInvertPattern),
        'AdminState' : AdminState,
        'ChannelNumber' : int(ChannelNumber),
        'FECMode' : FECMode,
        'ModulationFmt' : ModulationFmt,
        'TxPulseShapeFltrType' : TxPulseShapeFltrType,
        'RxPRBSPattern' : RxPRBSPattern,
        'TxPRBSPattern' : TxPRBSPattern,
        'DiffEncoding' : bool(DiffEncoding),
    }
    reqUrl = self.cfgUrlBase + 'DWDMModuleNwIntf'
    if self.authenticate:
        r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
    return r
def deleteDWDMModuleNwIntf(self,
                           NwIntfId,
                           ModuleId):
    """Delete the DWDMModuleNwIntf identified by its key fields.

    :returns: the ``requests.Response`` from the DELETE call
    """
    obj = {
        # Coerce keys to int for consistency with create/get, which
        # already send integer key values.
        'NwIntfId' : int(NwIntfId),
        'ModuleId' : int(ModuleId),
    }
    reqUrl = self.cfgUrlBase + 'DWDMModuleNwIntf'
    if self.authenticate:
        r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
    return r
def deleteDWDMModuleNwIntfById(self, objectId):
    """Delete the DWDMModuleNwIntf with the given object id.

    :returns: the ``requests.Response`` from the DELETE call
    """
    reqUrl = self.cfgUrlBase + 'DWDMModuleNwIntf' + "/%s" % (objectId)
    if self.authenticate:
        # Bug fix: the authenticated branch previously sent no
        # credentials (it was identical to the unauthenticated branch).
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updateDWDMModuleNwIntf(self,
                           NwIntfId,
                           ModuleId,
                           ClntIntfIdToTributary0Map = None,
                           ClntIntfIdToTributary1Map = None,
                           EnableRxPRBSChecker = None,
                           TxPulseShapeFltrRollOff = None,
                           TxPower = None,
                           RxPRBSInvertPattern = None,
                           TxPowerRampdBmPerSec = None,
                           EnableTxPRBS = None,
                           TxPRBSInvertPattern = None,
                           AdminState = None,
                           ChannelNumber = None,
                           FECMode = None,
                           ModulationFmt = None,
                           TxPulseShapeFltrType = None,
                           RxPRBSPattern = None,
                           TxPRBSPattern = None,
                           DiffEncoding = None):
    """Update the keyed DWDMModuleNwIntf; only non-None fields are sent.

    Integer fields are coerced with int(), flags with bool().

    :returns: the ``requests.Response`` from the PATCH call
    """
    obj = {}
    if NwIntfId is not None:
        obj['NwIntfId'] = int(NwIntfId)
    if ModuleId is not None:
        obj['ModuleId'] = int(ModuleId)
    if ClntIntfIdToTributary0Map is not None:
        obj['ClntIntfIdToTributary0Map'] = int(ClntIntfIdToTributary0Map)
    if ClntIntfIdToTributary1Map is not None:
        obj['ClntIntfIdToTributary1Map'] = int(ClntIntfIdToTributary1Map)
    if EnableRxPRBSChecker is not None:
        obj['EnableRxPRBSChecker'] = bool(EnableRxPRBSChecker)
    if TxPulseShapeFltrRollOff is not None:
        obj['TxPulseShapeFltrRollOff'] = TxPulseShapeFltrRollOff
    if TxPower is not None:
        obj['TxPower'] = TxPower
    if RxPRBSInvertPattern is not None:
        obj['RxPRBSInvertPattern'] = bool(RxPRBSInvertPattern)
    if TxPowerRampdBmPerSec is not None:
        obj['TxPowerRampdBmPerSec'] = TxPowerRampdBmPerSec
    if EnableTxPRBS is not None:
        obj['EnableTxPRBS'] = bool(EnableTxPRBS)
    if TxPRBSInvertPattern is not None:
        obj['TxPRBSInvertPattern'] = bool(TxPRBSInvertPattern)
    if AdminState is not None:
        obj['AdminState'] = AdminState
    if ChannelNumber is not None:
        obj['ChannelNumber'] = int(ChannelNumber)
    if FECMode is not None:
        obj['FECMode'] = FECMode
    if ModulationFmt is not None:
        obj['ModulationFmt'] = ModulationFmt
    if TxPulseShapeFltrType is not None:
        obj['TxPulseShapeFltrType'] = TxPulseShapeFltrType
    if RxPRBSPattern is not None:
        obj['RxPRBSPattern'] = RxPRBSPattern
    if TxPRBSPattern is not None:
        obj['TxPRBSPattern'] = TxPRBSPattern
    if DiffEncoding is not None:
        obj['DiffEncoding'] = bool(DiffEncoding)
    reqUrl = self.cfgUrlBase + 'DWDMModuleNwIntf'
    if self.authenticate:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
    return r
def updateDWDMModuleNwIntfById(self,
                               objectId,
                               ClntIntfIdToTributary0Map = None,
                               ClntIntfIdToTributary1Map = None,
                               EnableRxPRBSChecker = None,
                               TxPulseShapeFltrRollOff = None,
                               TxPower = None,
                               RxPRBSInvertPattern = None,
                               TxPowerRampdBmPerSec = None,
                               EnableTxPRBS = None,
                               TxPRBSInvertPattern = None,
                               AdminState = None,
                               ChannelNumber = None,
                               FECMode = None,
                               ModulationFmt = None,
                               TxPulseShapeFltrType = None,
                               RxPRBSPattern = None,
                               TxPRBSPattern = None,
                               DiffEncoding = None):
    """Update the DWDMModuleNwIntf addressed by object id; only
    non-None fields are included in the PATCH body (values are passed
    through unconverted, matching the other *ById methods here).

    :returns: the ``requests.Response`` from the PATCH call
    """
    obj = {}
    if ClntIntfIdToTributary0Map is not None:
        obj['ClntIntfIdToTributary0Map'] = ClntIntfIdToTributary0Map
    if ClntIntfIdToTributary1Map is not None:
        obj['ClntIntfIdToTributary1Map'] = ClntIntfIdToTributary1Map
    if EnableRxPRBSChecker is not None:
        obj['EnableRxPRBSChecker'] = EnableRxPRBSChecker
    if TxPulseShapeFltrRollOff is not None:
        obj['TxPulseShapeFltrRollOff'] = TxPulseShapeFltrRollOff
    if TxPower is not None:
        obj['TxPower'] = TxPower
    if RxPRBSInvertPattern is not None:
        obj['RxPRBSInvertPattern'] = RxPRBSInvertPattern
    if TxPowerRampdBmPerSec is not None:
        obj['TxPowerRampdBmPerSec'] = TxPowerRampdBmPerSec
    if EnableTxPRBS is not None:
        obj['EnableTxPRBS'] = EnableTxPRBS
    if TxPRBSInvertPattern is not None:
        obj['TxPRBSInvertPattern'] = TxPRBSInvertPattern
    if AdminState is not None:
        obj['AdminState'] = AdminState
    if ChannelNumber is not None:
        obj['ChannelNumber'] = ChannelNumber
    if FECMode is not None:
        obj['FECMode'] = FECMode
    if ModulationFmt is not None:
        obj['ModulationFmt'] = ModulationFmt
    if TxPulseShapeFltrType is not None:
        obj['TxPulseShapeFltrType'] = TxPulseShapeFltrType
    if RxPRBSPattern is not None:
        obj['RxPRBSPattern'] = RxPRBSPattern
    if TxPRBSPattern is not None:
        obj['TxPRBSPattern'] = TxPRBSPattern
    if DiffEncoding is not None:
        obj['DiffEncoding'] = DiffEncoding
    reqUrl = self.cfgUrlBase + 'DWDMModuleNwIntf' + "/%s" % (objectId)
    if self.authenticate:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
    return r
def patchUpdateDWDMModuleNwIntf(self,
                                NwIntfId,
                                ModuleId,
                                op,
                                path,
                                value):
    """Apply a single JSON-patch-style operation to the keyed
    DWDMModuleNwIntf.

    :returns: the ``requests.Response`` from the PATCH call
    """
    obj = {}
    obj['NwIntfId'] = NwIntfId
    obj['ModuleId'] = ModuleId
    obj['patch'] = [{'op': op, 'path': path, 'value': value}]
    reqUrl = self.cfgUrlBase + 'DWDMModuleNwIntf'
    if self.authenticate:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
    return r
def getDWDMModuleNwIntf(self,
                        NwIntfId,
                        ModuleId):
    """Fetch the DWDMModuleNwIntf identified by its key fields.

    :returns: the ``requests.Response`` from the GET call
    """
    obj = {
        'NwIntfId' : int(NwIntfId),
        'ModuleId' : int(ModuleId),
    }
    reqUrl = self.cfgUrlBase + 'DWDMModuleNwIntf'
    if self.authenticate:
        r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
    return r
def getDWDMModuleNwIntfById(self, objectId):
    """Fetch the DWDMModuleNwIntf with the given object id.

    :returns: the ``requests.Response`` from the GET call
    """
    reqUrl = self.cfgUrlBase + 'DWDMModuleNwIntf' + "/%s" % (objectId)
    if self.authenticate:
        r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def getAllDWDMModuleNwIntfs(self):
    """Return every DWDMModuleNwIntf configuration object."""
    baseUrl = self.cfgUrlBase
    return self.getObjects('DWDMModuleNwIntf', baseUrl)
"""
.. automethod :: createComponentLogging(self,
:param string Module : Module name to set logging level
:param string Level : Logging level
"""
def createComponentLogging(self,
                           Module,
                           Level='info'):
    """Create a ComponentLogging entry for a module (HTTP POST).

    :returns: the ``requests.Response`` from the POST call
    """
    obj = {
        'Module' : Module,
        'Level' : Level,
    }
    reqUrl = self.cfgUrlBase + 'ComponentLogging'
    if self.authenticate:
        r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
    return r
def deleteComponentLogging(self,
                           Module):
    """DELETE the ComponentLogging object identified by Module."""
    key = json.dumps({'Module': Module})
    req = dict(data=key, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.delete(self.cfgUrlBase + 'ComponentLogging', **req)
def deleteComponentLoggingById(self, objectId ):
    """DELETE the ComponentLogging object addressed by object id.

    Bug fix: both branches were identical, so the authenticated branch
    never sent credentials. It now passes basic auth and verify=False,
    consistent with every other authenticated request in this client.
    """
    reqUrl = self.cfgUrlBase+'ComponentLogging'+"/%s"%(objectId)
    if self.authenticate == True:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updateComponentLogging(self,
                           Module,
                           Level = None):
    """PATCH the ComponentLogging object; only non-None fields are sent."""
    payload = {}
    if Module is not None:
        payload['Module'] = Module
    if Level is not None:
        payload['Level'] = Level
    req = dict(data=json.dumps(payload), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(self.cfgUrlBase + 'ComponentLogging', **req)
def updateComponentLoggingById(self,
                               objectId,
                               Level = None):
    """PATCH the ComponentLogging object addressed by object id."""
    payload = {}
    if Level is not None:
        payload['Level'] = Level
    url = "%s%s/%s" % (self.cfgUrlBase, 'ComponentLogging', objectId)
    req = dict(data=json.dumps(payload), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, **req)
def patchUpdateComponentLogging(self,
                                Module,
                                op,
                                path,
                                value,):
    """Apply a JSON-patch style edit to the ComponentLogging config object."""
    payload = {
        'Module': Module,
        'patch': [{'op': op, 'path': path, 'value': value}],
    }
    req = dict(data=json.dumps(payload), headers=patchheaders, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(self.cfgUrlBase + 'ComponentLogging', **req)
def getComponentLogging(self,
                        Module):
    """Fetch the ComponentLogging config object identified by Module."""
    key = json.dumps({'Module': Module})
    req = dict(data=key, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(self.cfgUrlBase + 'ComponentLogging', **req)
def getComponentLoggingById(self, objectId ):
    """Fetch the ComponentLogging object addressed directly by object id."""
    url = "%s%s/%s" % (self.cfgUrlBase, 'ComponentLogging', objectId)
    req = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **req)
def getAllComponentLoggings(self):
    """Return all ComponentLogging config objects via the generic bulk getter."""
    return self.getObjects('ComponentLogging', self.cfgUrlBase)
"""
.. automethod :: createFan(self,
:param int32 FanId : Fan unit id
:param string AdminState : Fan admin ON/OFF
:param int32 AdminSpeed : Fan set speed in rpm
"""
def createFan(self,
              AdminState,
              AdminSpeed,
              FanId=0):
    """POST a new Fan config object.

    :param AdminState: fan admin state (ON/OFF)
    :param AdminSpeed: fan set speed in rpm (coerced with int())
    :param FanId: fan unit id. Previously hard-coded to 0; now a
        backward-compatible keyword argument with the same default.
    :returns: the requests.Response from the POST
    """
    obj = {
        'FanId' : int(FanId),
        'AdminState' : AdminState,
        'AdminSpeed' : int(AdminSpeed),
    }
    reqUrl = self.cfgUrlBase+'Fan'
    if self.authenticate == True:
        r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
    return r
def deleteFan(self,
              FanId):
    """DELETE the Fan object identified by FanId."""
    key = json.dumps({'FanId': FanId})
    req = dict(data=key, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.delete(self.cfgUrlBase + 'Fan', **req)
def deleteFanById(self, objectId ):
    """DELETE the Fan object addressed by object id.

    Bug fix: both branches were identical, so the authenticated branch
    never sent credentials. It now passes basic auth and verify=False,
    consistent with every other authenticated request in this client.
    """
    reqUrl = self.cfgUrlBase+'Fan'+"/%s"%(objectId)
    if self.authenticate == True:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updateFan(self,
              FanId,
              AdminState = None,
              AdminSpeed = None):
    """PATCH the Fan object; only non-None fields are sent (ints coerced)."""
    payload = {}
    if FanId is not None:
        payload['FanId'] = int(FanId)
    if AdminState is not None:
        payload['AdminState'] = AdminState
    if AdminSpeed is not None:
        payload['AdminSpeed'] = int(AdminSpeed)
    req = dict(data=json.dumps(payload), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(self.cfgUrlBase + 'Fan', **req)
def updateFanById(self,
                  objectId,
                  AdminState = None,
                  AdminSpeed = None):
    """PATCH the Fan object addressed by object id; only non-None fields sent."""
    payload = {}
    if AdminState is not None:
        payload['AdminState'] = AdminState
    if AdminSpeed is not None:
        payload['AdminSpeed'] = AdminSpeed
    url = "%s%s/%s" % (self.cfgUrlBase, 'Fan', objectId)
    req = dict(data=json.dumps(payload), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, **req)
def patchUpdateFan(self,
                   FanId,
                   op,
                   path,
                   value,):
    """Apply a JSON-patch style edit to the Fan config object."""
    payload = {
        'FanId': FanId,
        'patch': [{'op': op, 'path': path, 'value': value}],
    }
    req = dict(data=json.dumps(payload), headers=patchheaders, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(self.cfgUrlBase + 'Fan', **req)
def getFan(self,
           FanId):
    """Fetch the Fan config object identified by FanId."""
    key = json.dumps({'FanId': int(FanId)})
    req = dict(data=key, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(self.cfgUrlBase + 'Fan', **req)
def getFanById(self, objectId ):
    """Fetch the Fan object addressed directly by object id."""
    url = "%s%s/%s" % (self.cfgUrlBase, 'Fan', objectId)
    req = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **req)
def getAllFans(self):
    """Return all Fan config objects via the generic bulk getter."""
    return self.getObjects('Fan', self.cfgUrlBase)
"""
.. automethod :: createAclIpv4Filter(self,
:param string FilterName : ACL IPv4 filter name
:param int32 L4MinPort : Min port when l4 port is specified as range
:param int32 L4DstPort : TCP/UDP destination port
:param string Proto : Protocol type TCP/UDP/ICMPv4/ICMPv6
:param string DestIp : Destination IP address
:param int32 L4SrcPort : TCP/UDP source port
:param string DestMask : Network mask for dest IP
:param string DstIntf : Dest Intf (used for mlag)
:param string SrcIntf : Source Intf (used for mlag)
:param string SourceMask : Network mask for source IP
:param int32 L4MaxPort : Max port when l4 port is specified as range
:param string SourceIp : Source IP address
:param string L4PortMatch : match condition, e.g. EQ (equal)
"""
def createAclIpv4Filter(self,
                        FilterName,
                        L4MinPort=0,
                        L4DstPort=0,
                        Proto='',
                        DestIp='',
                        L4SrcPort=0,
                        DestMask='',
                        DstIntf='',
                        SrcIntf='',
                        SourceMask='',
                        L4MaxPort=0,
                        SourceIp='',
                        L4PortMatch='NA'):
    """POST a new AclIpv4Filter config object (integer fields coerced)."""
    payload = {
        'FilterName': FilterName,
        'L4MinPort': int(L4MinPort),
        'L4DstPort': int(L4DstPort),
        'Proto': Proto,
        'DestIp': DestIp,
        'L4SrcPort': int(L4SrcPort),
        'DestMask': DestMask,
        'DstIntf': DstIntf,
        'SrcIntf': SrcIntf,
        'SourceMask': SourceMask,
        'L4MaxPort': int(L4MaxPort),
        'SourceIp': SourceIp,
        'L4PortMatch': L4PortMatch,
    }
    req = dict(data=json.dumps(payload), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.post(self.cfgUrlBase + 'AclIpv4Filter', **req)
def deleteAclIpv4Filter(self,
                        FilterName):
    """DELETE the AclIpv4Filter object identified by FilterName."""
    key = json.dumps({'FilterName': FilterName})
    req = dict(data=key, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.delete(self.cfgUrlBase + 'AclIpv4Filter', **req)
def deleteAclIpv4FilterById(self, objectId ):
    """DELETE the AclIpv4Filter object addressed by object id.

    Bug fix: both branches were identical, so the authenticated branch
    never sent credentials. It now passes basic auth and verify=False,
    consistent with every other authenticated request in this client.
    """
    reqUrl = self.cfgUrlBase+'AclIpv4Filter'+"/%s"%(objectId)
    if self.authenticate == True:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updateAclIpv4Filter(self,
                        FilterName,
                        L4MinPort = None,
                        L4DstPort = None,
                        Proto = None,
                        DestIp = None,
                        L4SrcPort = None,
                        DestMask = None,
                        DstIntf = None,
                        SrcIntf = None,
                        SourceMask = None,
                        L4MaxPort = None,
                        SourceIp = None,
                        L4PortMatch = None):
    """PATCH an AclIpv4Filter; only non-None fields are sent.

    Port/range fields are coerced with int(), matching the create call.
    """
    fields = [
        ('FilterName', FilterName, False),
        ('L4MinPort', L4MinPort, True),
        ('L4DstPort', L4DstPort, True),
        ('Proto', Proto, False),
        ('DestIp', DestIp, False),
        ('L4SrcPort', L4SrcPort, True),
        ('DestMask', DestMask, False),
        ('DstIntf', DstIntf, False),
        ('SrcIntf', SrcIntf, False),
        ('SourceMask', SourceMask, False),
        ('L4MaxPort', L4MaxPort, True),
        ('SourceIp', SourceIp, False),
        ('L4PortMatch', L4PortMatch, False),
    ]
    payload = {}
    for name, val, to_int in fields:
        if val is not None:
            payload[name] = int(val) if to_int else val
    req = dict(data=json.dumps(payload), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(self.cfgUrlBase + 'AclIpv4Filter', **req)
def updateAclIpv4FilterById(self,
                            objectId,
                            L4MinPort = None,
                            L4DstPort = None,
                            Proto = None,
                            DestIp = None,
                            L4SrcPort = None,
                            DestMask = None,
                            DstIntf = None,
                            SrcIntf = None,
                            SourceMask = None,
                            L4MaxPort = None,
                            SourceIp = None,
                            L4PortMatch = None):
    """PATCH the AclIpv4Filter addressed by object id; only non-None fields sent."""
    candidates = [
        ('L4MinPort', L4MinPort),
        ('L4DstPort', L4DstPort),
        ('Proto', Proto),
        ('DestIp', DestIp),
        ('L4SrcPort', L4SrcPort),
        ('DestMask', DestMask),
        ('DstIntf', DstIntf),
        ('SrcIntf', SrcIntf),
        ('SourceMask', SourceMask),
        ('L4MaxPort', L4MaxPort),
        ('SourceIp', SourceIp),
        ('L4PortMatch', L4PortMatch),
    ]
    payload = {name: val for name, val in candidates if val is not None}
    url = "%s%s/%s" % (self.cfgUrlBase, 'AclIpv4Filter', objectId)
    req = dict(data=json.dumps(payload), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, **req)
def patchUpdateAclIpv4Filter(self,
                             FilterName,
                             op,
                             path,
                             value,):
    """Apply a JSON-patch style edit to the AclIpv4Filter config object."""
    payload = {
        'FilterName': FilterName,
        'patch': [{'op': op, 'path': path, 'value': value}],
    }
    req = dict(data=json.dumps(payload), headers=patchheaders, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(self.cfgUrlBase + 'AclIpv4Filter', **req)
def getAclIpv4Filter(self,
                     FilterName):
    """Fetch the AclIpv4Filter config object identified by FilterName."""
    key = json.dumps({'FilterName': FilterName})
    req = dict(data=key, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(self.cfgUrlBase + 'AclIpv4Filter', **req)
def getAclIpv4FilterById(self, objectId ):
    """Fetch the AclIpv4Filter object addressed directly by object id."""
    url = "%s%s/%s" % (self.cfgUrlBase, 'AclIpv4Filter', objectId)
    req = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **req)
def getAllAclIpv4Filters(self):
    """Return all AclIpv4Filter config objects via the generic bulk getter."""
    return self.getObjects('AclIpv4Filter', self.cfgUrlBase)
"""
.. automethod :: createSubIPv6Intf(self,
:param string IntfRef : Intf name for which the IPv6 sub interface is to be configured
:param string Type : Type of interface
:param string IpAddr : IP address for the sub interface
:param string MacAddr : MAC address to be used for the sub interface; if none is specified, the IPv6Intf MAC address will be used
:param bool Enable : Enable or disable this interface
:param bool LinkIp : Interface link-scope IP address auto-configured
"""
def createSubIPv6Intf(self,
                      IntfRef,
                      Type,
                      IpAddr,
                      MacAddr='',
                      Enable=True,
                      LinkIp=True):
    """POST a new SubIPv6Intf config object (flags normalized to bool)."""
    payload = {
        'IntfRef': IntfRef,
        'Type': Type,
        'IpAddr': IpAddr,
        'MacAddr': MacAddr,
        'Enable': bool(Enable),
        'LinkIp': bool(LinkIp),
    }
    req = dict(data=json.dumps(payload), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.post(self.cfgUrlBase + 'SubIPv6Intf', **req)
def deleteSubIPv6Intf(self,
                      IntfRef,
                      Type):
    """DELETE the SubIPv6Intf object identified by IntfRef and Type."""
    key = json.dumps({
        'IntfRef': IntfRef,
        'Type': Type,
    })
    req = dict(data=key, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.delete(self.cfgUrlBase + 'SubIPv6Intf', **req)
def deleteSubIPv6IntfById(self, objectId ):
    """DELETE the SubIPv6Intf object addressed by object id.

    Bug fix: both branches were identical, so the authenticated branch
    never sent credentials. It now passes basic auth and verify=False,
    consistent with every other authenticated request in this client.
    """
    reqUrl = self.cfgUrlBase+'SubIPv6Intf'+"/%s"%(objectId)
    if self.authenticate == True:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updateSubIPv6Intf(self,
                      IntfRef,
                      Type,
                      IpAddr = None,
                      MacAddr = None,
                      Enable = None,
                      LinkIp = None):
    """PATCH the SubIPv6Intf; only non-None fields are sent (flags to bool)."""
    payload = {}
    if IntfRef is not None:
        payload['IntfRef'] = IntfRef
    if Type is not None:
        payload['Type'] = Type
    if IpAddr is not None:
        payload['IpAddr'] = IpAddr
    if MacAddr is not None:
        payload['MacAddr'] = MacAddr
    if Enable is not None:
        payload['Enable'] = bool(Enable)
    if LinkIp is not None:
        payload['LinkIp'] = bool(LinkIp)
    req = dict(data=json.dumps(payload), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(self.cfgUrlBase + 'SubIPv6Intf', **req)
def updateSubIPv6IntfById(self,
                          objectId,
                          IpAddr = None,
                          MacAddr = None,
                          Enable = None,
                          LinkIp = None):
    """PATCH the SubIPv6Intf addressed by object id; only non-None fields sent."""
    candidates = [
        ('IpAddr', IpAddr),
        ('MacAddr', MacAddr),
        ('Enable', Enable),
        ('LinkIp', LinkIp),
    ]
    payload = {name: val for name, val in candidates if val is not None}
    url = "%s%s/%s" % (self.cfgUrlBase, 'SubIPv6Intf', objectId)
    req = dict(data=json.dumps(payload), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, **req)
def patchUpdateSubIPv6Intf(self,
                           IntfRef,
                           Type,
                           op,
                           path,
                           value,):
    """Apply a JSON-patch style edit to the SubIPv6Intf config object."""
    payload = {
        'IntfRef': IntfRef,
        'Type': Type,
        'patch': [{'op': op, 'path': path, 'value': value}],
    }
    req = dict(data=json.dumps(payload), headers=patchheaders, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(self.cfgUrlBase + 'SubIPv6Intf', **req)
def getSubIPv6Intf(self,
                   IntfRef,
                   Type):
    """Fetch the SubIPv6Intf config object identified by IntfRef and Type."""
    key = json.dumps({
        'IntfRef': IntfRef,
        'Type': Type,
    })
    req = dict(data=key, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(self.cfgUrlBase + 'SubIPv6Intf', **req)
def getSubIPv6IntfById(self, objectId ):
    """Fetch the SubIPv6Intf object addressed directly by object id."""
    url = "%s%s/%s" % (self.cfgUrlBase, 'SubIPv6Intf', objectId)
    req = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **req)
def getAllSubIPv6Intfs(self):
    """Return all SubIPv6Intf config objects via the generic bulk getter."""
    return self.getObjects('SubIPv6Intf', self.cfgUrlBase)
def getIPv6RouteState(self,
                      DestinationNw):
    """Fetch the IPv6Route state object for the given destination network."""
    key = json.dumps({'DestinationNw': DestinationNw})
    req = dict(data=key, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(self.stateUrlBase + 'IPv6Route', **req)
def getIPv6RouteStateById(self, objectId ):
    """Fetch the IPv6Route state object addressed directly by object id."""
    url = "%s%s/%s" % (self.stateUrlBase, 'IPv6Route', objectId)
    req = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **req)
def getAllIPv6RouteStates(self):
    """Return all IPv6Route state objects via the generic bulk getter."""
    return self.getObjects('IPv6Route', self.stateUrlBase)
def getPolicyPrefixSetState(self,
                            Name):
    """Fetch the PolicyPrefixSet state object identified by Name."""
    key = json.dumps({'Name': Name})
    req = dict(data=key, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(self.stateUrlBase + 'PolicyPrefixSet', **req)
def getPolicyPrefixSetStateById(self, objectId ):
    """Fetch the PolicyPrefixSet state object addressed directly by object id."""
    url = "%s%s/%s" % (self.stateUrlBase, 'PolicyPrefixSet', objectId)
    req = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **req)
def getAllPolicyPrefixSetStates(self):
    """Return all PolicyPrefixSet state objects via the generic bulk getter."""
    return self.getObjects('PolicyPrefixSet', self.stateUrlBase)
"""
.. automethod :: createPsu(self,
:param int32 PsuId : PSU id
:param string AdminState : Admin UP/DOWN PSU
"""
def createPsu(self,
              AdminState,
              PsuId=0):
    """POST a new Psu config object.

    :param AdminState: PSU admin state (UP/DOWN)
    :param PsuId: PSU id. Previously hard-coded to 0; now a
        backward-compatible keyword argument with the same default.
    :returns: the requests.Response from the POST
    """
    obj = {
        'PsuId' : int(PsuId),
        'AdminState' : AdminState,
    }
    reqUrl = self.cfgUrlBase+'Psu'
    if self.authenticate == True:
        r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
    return r
def deletePsu(self,
              PsuId):
    """DELETE the Psu object identified by PsuId."""
    key = json.dumps({'PsuId': PsuId})
    req = dict(data=key, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.delete(self.cfgUrlBase + 'Psu', **req)
def deletePsuById(self, objectId ):
    """DELETE the Psu object addressed by object id.

    Bug fix: both branches were identical, so the authenticated branch
    never sent credentials. It now passes basic auth and verify=False,
    consistent with every other authenticated request in this client.
    """
    reqUrl = self.cfgUrlBase+'Psu'+"/%s"%(objectId)
    if self.authenticate == True:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updatePsu(self,
              PsuId,
              AdminState = None):
    """PATCH the Psu object; only non-None fields are sent."""
    payload = {}
    if PsuId is not None:
        payload['PsuId'] = int(PsuId)
    if AdminState is not None:
        payload['AdminState'] = AdminState
    req = dict(data=json.dumps(payload), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(self.cfgUrlBase + 'Psu', **req)
def updatePsuById(self,
                  objectId,
                  AdminState = None):
    """PATCH the Psu object addressed by object id; only non-None fields sent."""
    payload = {}
    if AdminState is not None:
        payload['AdminState'] = AdminState
    url = "%s%s/%s" % (self.cfgUrlBase, 'Psu', objectId)
    req = dict(data=json.dumps(payload), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, **req)
def patchUpdatePsu(self,
                   PsuId,
                   op,
                   path,
                   value,):
    """Apply a JSON-patch style edit to the Psu config object."""
    payload = {
        'PsuId': PsuId,
        'patch': [{'op': op, 'path': path, 'value': value}],
    }
    req = dict(data=json.dumps(payload), headers=patchheaders, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(self.cfgUrlBase + 'Psu', **req)
def getPsu(self,
           PsuId):
    """Fetch the Psu config object identified by PsuId."""
    key = json.dumps({'PsuId': int(PsuId)})
    req = dict(data=key, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(self.cfgUrlBase + 'Psu', **req)
def getPsuById(self, objectId ):
    """Fetch the Psu object addressed directly by object id."""
    url = "%s%s/%s" % (self.cfgUrlBase, 'Psu', objectId)
    req = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **req)
def getAllPsus(self):
    """Return all Psu config objects via the generic bulk getter."""
    return self.getObjects('Psu', self.cfgUrlBase)
def getBGPv4NeighborState(self,
                          IntfRef,
                          NeighborAddress):
    """Fetch the BGPv4Neighbor state object matching the given keys."""
    key = json.dumps({
        'IntfRef': IntfRef,
        'NeighborAddress': NeighborAddress,
    })
    req = dict(data=key, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(self.stateUrlBase + 'BGPv4Neighbor', **req)
def getBGPv4NeighborStateById(self, objectId ):
    """Fetch the BGPv4Neighbor state object addressed directly by object id."""
    url = "%s%s/%s" % (self.stateUrlBase, 'BGPv4Neighbor', objectId)
    req = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **req)
def getAllBGPv4NeighborStates(self):
    """Return all BGPv4Neighbor state objects via the generic bulk getter."""
    return self.getObjects('BGPv4Neighbor', self.stateUrlBase)
"""
.. automethod :: executeArpRefreshByIPv4Addr(self,
:param string IpAddr : Neighbor's IP address for which the corresponding ARP entry needs to be re-learned
"""
def executeArpRefreshByIPv4Addr(self,
                                IpAddr):
    """Trigger re-learning of the ARP entry for the given neighbor IP.

    Fix: this was the only request in the client issued without a
    timeout, so it could block indefinitely; it now honors
    self.timeout like every other call.
    """
    obj = {
        'IpAddr' : IpAddr,
    }
    reqUrl = self.actionUrlBase+'ArpRefreshByIPv4Addr'
    if self.authenticate == True:
        r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
    return r
def updateXponderGlobal(self,
                        XponderId,
                        XponderDescription = None,
                        XponderMode = None):
    """PATCH the XponderGlobal object; only non-None fields are sent."""
    payload = {}
    if XponderId is not None:
        payload['XponderId'] = int(XponderId)
    if XponderDescription is not None:
        payload['XponderDescription'] = XponderDescription
    if XponderMode is not None:
        payload['XponderMode'] = XponderMode
    req = dict(data=json.dumps(payload), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(self.cfgUrlBase + 'XponderGlobal', **req)
def updateXponderGlobalById(self,
                            objectId,
                            XponderDescription = None,
                            XponderMode = None):
    """PATCH the XponderGlobal addressed by object id; only non-None fields sent."""
    payload = {}
    if XponderDescription is not None:
        payload['XponderDescription'] = XponderDescription
    if XponderMode is not None:
        payload['XponderMode'] = XponderMode
    url = "%s%s/%s" % (self.cfgUrlBase, 'XponderGlobal', objectId)
    req = dict(data=json.dumps(payload), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, **req)
def patchUpdateXponderGlobal(self,
                             XponderId,
                             op,
                             path,
                             value,):
    """Apply a JSON-patch style edit to the XponderGlobal config object."""
    payload = {
        'XponderId': XponderId,
        'patch': [{'op': op, 'path': path, 'value': value}],
    }
    req = dict(data=json.dumps(payload), headers=patchheaders, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(self.cfgUrlBase + 'XponderGlobal', **req)
def getXponderGlobal(self,
                     XponderId):
    """Fetch the XponderGlobal config object identified by XponderId."""
    key = json.dumps({'XponderId': int(XponderId)})
    req = dict(data=key, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(self.cfgUrlBase + 'XponderGlobal', **req)
def getXponderGlobalById(self, objectId ):
    """Fetch the XponderGlobal object addressed directly by object id."""
    url = "%s%s/%s" % (self.cfgUrlBase, 'XponderGlobal', objectId)
    req = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **req)
def getAllXponderGlobals(self):
    """Return all XponderGlobal config objects via the generic bulk getter."""
    return self.getObjects('XponderGlobal', self.cfgUrlBase)
def getVrrpGlobalState(self,
                       Vrf):
    """Fetch the VrrpGlobal state object identified by Vrf."""
    key = json.dumps({'Vrf': Vrf})
    req = dict(data=key, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(self.stateUrlBase + 'VrrpGlobal', **req)
def getVrrpGlobalStateById(self, objectId ):
    """Fetch the VrrpGlobal state object addressed directly by object id."""
    url = "%s%s/%s" % (self.stateUrlBase, 'VrrpGlobal', objectId)
    req = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **req)
def getAllVrrpGlobalStates(self):
    """Return all VrrpGlobal state objects via the generic bulk getter."""
    return self.getObjects('VrrpGlobal', self.stateUrlBase)
"""
.. automethod :: createOspfAreaEntry(self,
:param string AreaId : A 32-bit integer uniquely identifying an area. Area ID 0.0.0.0 is used for the OSPF backbone.
:param int32 AuthType : The authentication type specified for an area.
:param int32 ImportAsExtern : Indicates if an area is a stub area
:param int32 AreaSummary : Controls the import of summary LSAs into stub and NSSA areas; it has no effect on other areas.
:param int32 StubDefaultCost : For ABR this cost indicates default cost for summary LSA.
"""
def createOspfAreaEntry(self,
                        AreaId,
                        AuthType,
                        ImportAsExtern,
                        AreaSummary,
                        StubDefaultCost=10):
    """POST a new OspfAreaEntry config object (integer fields coerced)."""
    payload = {
        'AreaId': AreaId,
        'AuthType': int(AuthType),
        'ImportAsExtern': int(ImportAsExtern),
        'AreaSummary': int(AreaSummary),
        'StubDefaultCost': int(StubDefaultCost),
    }
    req = dict(data=json.dumps(payload), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.post(self.cfgUrlBase + 'OspfAreaEntry', **req)
def deleteOspfAreaEntry(self,
                        AreaId):
    """DELETE the OspfAreaEntry object identified by AreaId."""
    key = json.dumps({'AreaId': AreaId})
    req = dict(data=key, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.delete(self.cfgUrlBase + 'OspfAreaEntry', **req)
def deleteOspfAreaEntryById(self, objectId ):
    """DELETE the OspfAreaEntry object addressed by object id.

    Bug fix: both branches were identical, so the authenticated branch
    never sent credentials. It now passes basic auth and verify=False,
    consistent with every other authenticated request in this client.
    """
    reqUrl = self.cfgUrlBase+'OspfAreaEntry'+"/%s"%(objectId)
    if self.authenticate == True:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updateOspfAreaEntry(self,
                        AreaId,
                        AuthType = None,
                        ImportAsExtern = None,
                        AreaSummary = None,
                        StubDefaultCost = None):
    """PATCH the OspfAreaEntry; only non-None fields are sent (ints coerced)."""
    payload = {}
    if AreaId is not None:
        payload['AreaId'] = AreaId
    if AuthType is not None:
        payload['AuthType'] = int(AuthType)
    if ImportAsExtern is not None:
        payload['ImportAsExtern'] = int(ImportAsExtern)
    if AreaSummary is not None:
        payload['AreaSummary'] = int(AreaSummary)
    if StubDefaultCost is not None:
        payload['StubDefaultCost'] = int(StubDefaultCost)
    req = dict(data=json.dumps(payload), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(self.cfgUrlBase + 'OspfAreaEntry', **req)
def updateOspfAreaEntryById(self,
                            objectId,
                            AuthType = None,
                            ImportAsExtern = None,
                            AreaSummary = None,
                            StubDefaultCost = None):
    """PATCH the OspfAreaEntry addressed by object id; only non-None fields sent."""
    candidates = [
        ('AuthType', AuthType),
        ('ImportAsExtern', ImportAsExtern),
        ('AreaSummary', AreaSummary),
        ('StubDefaultCost', StubDefaultCost),
    ]
    payload = {name: val for name, val in candidates if val is not None}
    url = "%s%s/%s" % (self.cfgUrlBase, 'OspfAreaEntry', objectId)
    req = dict(data=json.dumps(payload), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, **req)
def patchUpdateOspfAreaEntry(self,
                             AreaId,
                             op,
                             path,
                             value,):
    """Apply a JSON-patch style edit to the OspfAreaEntry config object."""
    payload = {
        'AreaId': AreaId,
        'patch': [{'op': op, 'path': path, 'value': value}],
    }
    req = dict(data=json.dumps(payload), headers=patchheaders, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(self.cfgUrlBase + 'OspfAreaEntry', **req)
def getOspfAreaEntry(self,
                     AreaId):
    """Fetch the OspfAreaEntry config object identified by AreaId."""
    key = json.dumps({'AreaId': AreaId})
    req = dict(data=key, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(self.cfgUrlBase + 'OspfAreaEntry', **req)
def getOspfAreaEntryById(self, objectId ):
    """Fetch the OspfAreaEntry object addressed directly by object id."""
    url = "%s%s/%s" % (self.cfgUrlBase, 'OspfAreaEntry', objectId)
    req = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        req.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **req)
def getAllOspfAreaEntrys(self):
    """Return all OspfAreaEntry config objects via the generic bulk getter."""
    return self.getObjects('OspfAreaEntry', self.cfgUrlBase)
def updateOspfv2Global(self,
                       Vrf,
                       ASBdrRtrStatus = None,
                       RouterId = None,
                       AdminState = None,
                       ReferenceBandwidth = None):
    """PATCH the Ospfv2Global object keyed by Vrf; only non-None fields are sent."""
    obj = {}
    if Vrf is not None:
        obj['Vrf'] = Vrf
    if ASBdrRtrStatus is not None:
        obj['ASBdrRtrStatus'] = bool(ASBdrRtrStatus)
    if RouterId is not None:
        obj['RouterId'] = RouterId
    if AdminState is not None:
        obj['AdminState'] = AdminState
    if ReferenceBandwidth is not None:
        obj['ReferenceBandwidth'] = int(ReferenceBandwidth)
    reqUrl = self.cfgUrlBase + 'Ospfv2Global'
    kwargs = {'data': json.dumps(obj), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.patch(reqUrl, **kwargs)
def updateOspfv2GlobalById(self,
                           objectId,
                           ASBdrRtrStatus = None,
                           RouterId = None,
                           AdminState = None,
                           ReferenceBandwidth = None):
    """PATCH the Ospfv2Global identified by objectId; only non-None fields are sent."""
    obj = {name: val for name, val in (('ASBdrRtrStatus', ASBdrRtrStatus),
                                       ('RouterId', RouterId),
                                       ('AdminState', AdminState),
                                       ('ReferenceBandwidth', ReferenceBandwidth))
           if val is not None}
    reqUrl = self.cfgUrlBase + 'Ospfv2Global' + "/%s" % (objectId)
    kwargs = {'data': json.dumps(obj), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.patch(reqUrl, **kwargs)
def patchUpdateOspfv2Global(self,
                            Vrf,
                            op,
                            path,
                            value,):
    """Send a JSON-patch style update for the Ospfv2Global keyed by Vrf."""
    obj = {'Vrf': Vrf,
           'patch': [{'op': op, 'path': path, 'value': value}]}
    reqUrl = self.cfgUrlBase + 'Ospfv2Global'
    kwargs = {'data': json.dumps(obj), 'headers': patchheaders, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.patch(reqUrl, **kwargs)
def getOspfv2Global(self,
                    Vrf):
    """GET the Ospfv2Global config object keyed by Vrf."""
    obj = {'Vrf': Vrf}
    reqUrl = self.cfgUrlBase + 'Ospfv2Global'
    kwargs = {'data': json.dumps(obj), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.get(reqUrl, **kwargs)
def getOspfv2GlobalById(self, objectId ):
    """GET a single Ospfv2Global config object by its object id."""
    reqUrl = self.cfgUrlBase + 'Ospfv2Global' + "/%s" % (objectId)
    kwargs = {'data': None, 'headers': headers, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.get(reqUrl, **kwargs)
def getAllOspfv2Globals(self):
    """Return every Ospfv2Global config object via the generic object lister."""
    return self.getObjects('Ospfv2Global', self.cfgUrlBase)
"""
.. automethod :: createVxlanVtepInstance(self,
    :param string Intf : VTEP instance identifier name; should be defined as either vtep<id#> or <id#> — if the latter, 'vtep' will be prepended to the <id#>
:param uint32 Vni : VXLAN Network ID VXLAN Network ID
    :param string IntfRef : Source interface from which the source IP will be derived. If an interface is not supplied the src-ip will be used. This attribute takes precedence over the src-ip attribute.
:param uint16 VlanId : Vlan Id to encapsulate with the vtep tunnel ethernet header Vlan Id to encapsulate with the vtep tunnel ethernet header
:param string DstIp : Destination IP address list for the VxLAN tunnel Destination IP address list for the VxLAN tunnel
:param uint16 TOS : Type of Service Type of Service
:param uint32 Mtu : Set the MTU to be applied to all VTEP within this VxLAN Set the MTU to be applied to all VTEP within this VxLAN
:param int32 InnerVlanHandlingMode : The inner vlan tag handling mode. The inner vlan tag handling mode.
:param string AdminState : Administrative state of VXLAN MAC/IP layer Administrative state of VXLAN MAC/IP layer
:param uint16 TTL : TTL of the Vxlan tunnel TTL of the Vxlan tunnel
:param string SrcIp : Source IP address for the VxLAN tunnel Source IP address for the VxLAN tunnel
    :param uint16 DstUDP : VxLAN UDP port. Default is the IANA default UDP port.
"""
def createVxlanVtepInstance(self,
                            Intf,
                            Vni,
                            IntfRef,
                            VlanId,
                            DstIp,
                            TOS=0,
                            Mtu=1450,
                            InnerVlanHandlingMode=0,
                            AdminState='UP',
                            TTL=64,
                            SrcIp='0.0.0.0',
                            DstUDP=4789):
    """POST a new VxlanVtepInstance config object; numeric fields are coerced to int."""
    obj = dict(
        Intf=Intf,
        Vni=int(Vni),
        IntfRef=IntfRef,
        VlanId=int(VlanId),
        DstIp=DstIp,
        TOS=int(TOS),
        Mtu=int(Mtu),
        InnerVlanHandlingMode=int(InnerVlanHandlingMode),
        AdminState=AdminState,
        TTL=int(TTL),
        SrcIp=SrcIp,
        DstUDP=int(DstUDP),
    )
    reqUrl = self.cfgUrlBase + 'VxlanVtepInstance'
    kwargs = {'data': json.dumps(obj), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.post(reqUrl, **kwargs)
def deleteVxlanVtepInstance(self,
                            Intf,
                            Vni):
    """DELETE the VxlanVtepInstance keyed by Intf and Vni."""
    obj = {'Intf': Intf, 'Vni': Vni}
    reqUrl = self.cfgUrlBase + 'VxlanVtepInstance'
    kwargs = {'data': json.dumps(obj), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.delete(reqUrl, **kwargs)
def deleteVxlanVtepInstanceById(self, objectId ):
    """DELETE the VxlanVtepInstance identified by objectId.

    Fix: the authenticated branch previously omitted auth=(user, passwd) and
    verify=False, making it byte-identical to the unauthenticated branch —
    inconsistent with every other authenticated request in this client.
    """
    reqUrl = self.cfgUrlBase + 'VxlanVtepInstance' + "/%s" % (objectId)
    if self.authenticate == True:
        # Credentials now sent, matching the file's authenticated-request pattern.
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updateVxlanVtepInstance(self,
                            Intf,
                            Vni,
                            IntfRef = None,
                            VlanId = None,
                            DstIp = None,
                            TOS = None,
                            Mtu = None,
                            InnerVlanHandlingMode = None,
                            AdminState = None,
                            TTL = None,
                            SrcIp = None,
                            DstUDP = None):
    """PATCH VxlanVtepInstance keyed by Intf/Vni; only non-None fields are sent.

    Each field is paired with its coercion so numeric values go out as ints.
    """
    asis = lambda v: v
    fields = (('Intf', Intf, asis),
              ('Vni', Vni, int),
              ('IntfRef', IntfRef, asis),
              ('VlanId', VlanId, int),
              ('DstIp', DstIp, asis),
              ('TOS', TOS, int),
              ('Mtu', Mtu, int),
              ('InnerVlanHandlingMode', InnerVlanHandlingMode, int),
              ('AdminState', AdminState, asis),
              ('TTL', TTL, int),
              ('SrcIp', SrcIp, asis),
              ('DstUDP', DstUDP, int))
    obj = {name: conv(val) for name, val, conv in fields if val is not None}
    reqUrl = self.cfgUrlBase + 'VxlanVtepInstance'
    kwargs = {'data': json.dumps(obj), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.patch(reqUrl, **kwargs)
def updateVxlanVtepInstanceById(self,
                                objectId,
                                IntfRef = None,
                                VlanId = None,
                                DstIp = None,
                                TOS = None,
                                Mtu = None,
                                InnerVlanHandlingMode = None,
                                AdminState = None,
                                TTL = None,
                                SrcIp = None,
                                DstUDP = None):
    """PATCH the VxlanVtepInstance identified by objectId; only non-None fields are sent."""
    obj = {name: val for name, val in (('IntfRef', IntfRef),
                                       ('VlanId', VlanId),
                                       ('DstIp', DstIp),
                                       ('TOS', TOS),
                                       ('Mtu', Mtu),
                                       ('InnerVlanHandlingMode', InnerVlanHandlingMode),
                                       ('AdminState', AdminState),
                                       ('TTL', TTL),
                                       ('SrcIp', SrcIp),
                                       ('DstUDP', DstUDP))
           if val is not None}
    reqUrl = self.cfgUrlBase + 'VxlanVtepInstance' + "/%s" % (objectId)
    kwargs = {'data': json.dumps(obj), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.patch(reqUrl, **kwargs)
def patchUpdateVxlanVtepInstance(self,
                                 Intf,
                                 Vni,
                                 op,
                                 path,
                                 value,):
    """Send a JSON-patch style update for the VxlanVtepInstance keyed by Intf/Vni."""
    obj = {'Intf': Intf,
           'Vni': Vni,
           'patch': [{'op': op, 'path': path, 'value': value}]}
    reqUrl = self.cfgUrlBase + 'VxlanVtepInstance'
    kwargs = {'data': json.dumps(obj), 'headers': patchheaders, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.patch(reqUrl, **kwargs)
def getVxlanVtepInstance(self,
                         Intf,
                         Vni):
    """GET the VxlanVtepInstance keyed by Intf and Vni (Vni is coerced to int)."""
    obj = {'Intf': Intf, 'Vni': int(Vni)}
    reqUrl = self.cfgUrlBase + 'VxlanVtepInstance'
    kwargs = {'data': json.dumps(obj), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.get(reqUrl, **kwargs)
def getVxlanVtepInstanceById(self, objectId ):
    """GET a single VxlanVtepInstance config object by its object id."""
    reqUrl = self.cfgUrlBase + 'VxlanVtepInstance' + "/%s" % (objectId)
    kwargs = {'data': None, 'headers': headers, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.get(reqUrl, **kwargs)
def getAllVxlanVtepInstances(self):
    """Return every VxlanVtepInstance config object via the generic object lister."""
    return self.getObjects('VxlanVtepInstance', self.cfgUrlBase)
def getOspfv2RouteState(self,
                        DestId,
                        DestType,
                        AddrMask):
    """GET the Ospfv2Route state object keyed by DestId/DestType/AddrMask."""
    obj = {'DestId': DestId,
           'DestType': DestType,
           'AddrMask': AddrMask}
    reqUrl = self.stateUrlBase + 'Ospfv2Route'
    kwargs = {'data': json.dumps(obj), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.get(reqUrl, **kwargs)
def getOspfv2RouteStateById(self, objectId ):
    """GET a single Ospfv2Route state object by its object id."""
    reqUrl = self.stateUrlBase + 'Ospfv2Route' + "/%s" % (objectId)
    kwargs = {'data': None, 'headers': headers, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.get(reqUrl, **kwargs)
def getAllOspfv2RouteStates(self):
    """Return every Ospfv2Route state object via the generic object lister."""
    return self.getObjects('Ospfv2Route', self.stateUrlBase)
def getLaPortChannelState(self,
                          IntfRef):
    """GET the LaPortChannel state object keyed by IntfRef."""
    obj = {'IntfRef': IntfRef}
    reqUrl = self.stateUrlBase + 'LaPortChannel'
    kwargs = {'data': json.dumps(obj), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.get(reqUrl, **kwargs)
def getLaPortChannelStateById(self, objectId ):
    """GET a single LaPortChannel state object by its object id."""
    reqUrl = self.stateUrlBase + 'LaPortChannel' + "/%s" % (objectId)
    kwargs = {'data': None, 'headers': headers, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.get(reqUrl, **kwargs)
def getAllLaPortChannelStates(self):
    """Return every LaPortChannel state object via the generic object lister."""
    return self.getObjects('LaPortChannel', self.stateUrlBase)
def getVxlanInstanceState(self,
                          Vni):
    """GET the VxlanInstance state object keyed by Vni (coerced to int)."""
    obj = {'Vni': int(Vni)}
    reqUrl = self.stateUrlBase + 'VxlanInstance'
    kwargs = {'data': json.dumps(obj), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.get(reqUrl, **kwargs)
def getVxlanInstanceStateById(self, objectId ):
    """GET a single VxlanInstance state object by its object id."""
    reqUrl = self.stateUrlBase + 'VxlanInstance' + "/%s" % (objectId)
    kwargs = {'data': None, 'headers': headers, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.get(reqUrl, **kwargs)
def getAllVxlanInstanceStates(self):
    """Return every VxlanInstance state object via the generic object lister."""
    return self.getObjects('VxlanInstance', self.stateUrlBase)
"""
.. automethod :: createDhcpGlobalConfig(self,
:param string DhcpConfigKey : DHCP global config DHCP global config
:param bool Enable : DHCP Server enable/disable control DEFAULT DHCP Server enable/disable control DEFAULT
:param uint32 DefaultLeaseTime : Default Lease Time in seconds DEFAULT Default Lease Time in seconds DEFAULT
:param uint32 MaxLeaseTime : Max Lease Time in seconds DEFAULT Max Lease Time in seconds DEFAULT
"""
def createDhcpGlobalConfig(self,
                           Enable,
                           DefaultLeaseTime,
                           MaxLeaseTime):
    """POST the (singleton) DhcpGlobalConfig; key is always 'default'."""
    obj = dict(
        DhcpConfigKey='default',
        Enable=bool(Enable),
        DefaultLeaseTime=int(DefaultLeaseTime),
        MaxLeaseTime=int(MaxLeaseTime),
    )
    reqUrl = self.cfgUrlBase + 'DhcpGlobalConfig'
    kwargs = {'data': json.dumps(obj), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.post(reqUrl, **kwargs)
def deleteDhcpGlobalConfig(self,
                           DhcpConfigKey):
    """DELETE the DhcpGlobalConfig keyed by DhcpConfigKey."""
    obj = {'DhcpConfigKey': DhcpConfigKey}
    reqUrl = self.cfgUrlBase + 'DhcpGlobalConfig'
    kwargs = {'data': json.dumps(obj), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.delete(reqUrl, **kwargs)
def deleteDhcpGlobalConfigById(self, objectId ):
    """DELETE the DhcpGlobalConfig identified by objectId.

    Fix: the authenticated branch previously omitted auth=(user, passwd) and
    verify=False, making it byte-identical to the unauthenticated branch —
    inconsistent with every other authenticated request in this client.
    """
    reqUrl = self.cfgUrlBase + 'DhcpGlobalConfig' + "/%s" % (objectId)
    if self.authenticate == True:
        # Credentials now sent, matching the file's authenticated-request pattern.
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updateDhcpGlobalConfig(self,
                           DhcpConfigKey,
                           Enable = None,
                           DefaultLeaseTime = None,
                           MaxLeaseTime = None):
    """PATCH DhcpGlobalConfig keyed by DhcpConfigKey; only non-None fields are sent."""
    obj = {}
    if DhcpConfigKey is not None:
        obj['DhcpConfigKey'] = DhcpConfigKey
    if Enable is not None:
        obj['Enable'] = bool(Enable)
    if DefaultLeaseTime is not None:
        obj['DefaultLeaseTime'] = int(DefaultLeaseTime)
    if MaxLeaseTime is not None:
        obj['MaxLeaseTime'] = int(MaxLeaseTime)
    reqUrl = self.cfgUrlBase + 'DhcpGlobalConfig'
    kwargs = {'data': json.dumps(obj), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.patch(reqUrl, **kwargs)
def updateDhcpGlobalConfigById(self,
                               objectId,
                               Enable = None,
                               DefaultLeaseTime = None,
                               MaxLeaseTime = None):
    """PATCH the DhcpGlobalConfig identified by objectId; only non-None fields are sent."""
    obj = {name: val for name, val in (('Enable', Enable),
                                       ('DefaultLeaseTime', DefaultLeaseTime),
                                       ('MaxLeaseTime', MaxLeaseTime))
           if val is not None}
    reqUrl = self.cfgUrlBase + 'DhcpGlobalConfig' + "/%s" % (objectId)
    kwargs = {'data': json.dumps(obj), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.patch(reqUrl, **kwargs)
def patchUpdateDhcpGlobalConfig(self,
                                DhcpConfigKey,
                                op,
                                path,
                                value,):
    """Send a JSON-patch style update for the DhcpGlobalConfig keyed by DhcpConfigKey."""
    obj = {'DhcpConfigKey': DhcpConfigKey,
           'patch': [{'op': op, 'path': path, 'value': value}]}
    reqUrl = self.cfgUrlBase + 'DhcpGlobalConfig'
    kwargs = {'data': json.dumps(obj), 'headers': patchheaders, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.patch(reqUrl, **kwargs)
def getDhcpGlobalConfig(self,
                        DhcpConfigKey):
    """GET the DhcpGlobalConfig keyed by DhcpConfigKey."""
    obj = {'DhcpConfigKey': DhcpConfigKey}
    reqUrl = self.cfgUrlBase + 'DhcpGlobalConfig'
    kwargs = {'data': json.dumps(obj), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.get(reqUrl, **kwargs)
def getDhcpGlobalConfigById(self, objectId ):
    """GET a single DhcpGlobalConfig object by its object id."""
    reqUrl = self.cfgUrlBase + 'DhcpGlobalConfig' + "/%s" % (objectId)
    kwargs = {'data': None, 'headers': headers, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.get(reqUrl, **kwargs)
def getAllDhcpGlobalConfigs(self):
    """Return every DhcpGlobalConfig object via the generic object lister."""
    return self.getObjects('DhcpGlobalConfig', self.cfgUrlBase)
"""
.. automethod :: createDWDMModuleClntIntf(self,
:param uint8 ClntIntfId : DWDM Module client interface identifier DWDM Module client interface identifier
:param uint8 ModuleId : DWDM Module identifier DWDM Module identifier
:param uint8 NwLaneTributaryToClntIntfMap : Network lane/tributary id to map to client interface Network lane/tributary id to map to client interface
:param uint8 HostTxEqDfe : Host interface TX deserializer equalization. s-DFE Host interface TX deserializer equalization. s-DFE
:param uint8 HostRxSerializerTap1Gain : Host RX Serializer tap 1 control Host RX Serializer tap 1 control
:param string RxPRBSPattern : RX PRBS generator pattern RX PRBS generator pattern
:param uint8 HostRxSerializerTap2Delay : Host RX Serializer tap 2 control Host RX Serializer tap 2 control
:param uint8 HostRxSerializerTap2Gain : Host RX Serializer tap 2 control Host RX Serializer tap 2 control
:param uint8 HostRxSerializerTap0Delay : Host RX Serializer tap 0 control Host RX Serializer tap 0 control
:param uint8 HostTxEqCtle : Host interface TX deserializer equalization. LELRC CTLE LE gain code. Host interface TX deserializer equalization. LELRC CTLE LE gain code.
:param string TxPRBSPattern : PRBS pattern to use for checker PRBS pattern to use for checker
:param uint8 HostTxEqLfCtle : Host interface TX deserializer equalization. LELPZRC LF-CTLE LFPZ gain code. Host interface TX deserializer equalization. LELPZRC LF-CTLE LFPZ gain code.
:param string AdminState : Administrative state of this client interface Administrative state of this client interface
:param bool RXFECDecDisable : 802.3bj FEC decoder enable/disable state for traffic from DWDM module to Host 802.3bj FEC decoder enable/disable state for traffic from DWDM module to Host
:param bool EnableTxPRBSChecker : Enable/Disable TX PRBS checker for all lanes of this client interface Enable/Disable TX PRBS checker for all lanes of this client interface
:param bool EnableHostLoopback : Enable/Disable loopback on all host lanes of this client interface Enable/Disable loopback on all host lanes of this client interface
:param uint8 HostRxSerializerTap0Gain : Host RX Serializer tap 0 control Host RX Serializer tap 0 control
:param bool TXFECDecDisable : 802.3bj FEC decoder enable/disable state for traffic from Host to DWDM Module 802.3bj FEC decoder enable/disable state for traffic from Host to DWDM Module
:param bool EnableRxPRBS : Enable/Disable RX PRBS generation for all lanes of this client interface Enable/Disable RX PRBS generation for all lanes of this client interface
:param bool EnableIntSerdesNWLoopback : Enable/Disable serdes internal loopback Enable/Disable serdes internal loopback
"""
def createDWDMModuleClntIntf(self,
                             ClntIntfId,
                             ModuleId,
                             NwLaneTributaryToClntIntfMap,
                             HostTxEqDfe=0,
                             HostRxSerializerTap1Gain=7,
                             RxPRBSPattern='2^31',
                             HostRxSerializerTap2Delay=5,
                             HostRxSerializerTap2Gain=15,
                             HostRxSerializerTap0Delay=7,
                             HostTxEqCtle=18,
                             TxPRBSPattern='2^31',
                             HostTxEqLfCtle=0,
                             AdminState='UP',
                             RXFECDecDisable=False,
                             EnableTxPRBSChecker=False,
                             EnableHostLoopback=False,
                             HostRxSerializerTap0Gain=7,
                             TXFECDecDisable=False,
                             EnableRxPRBS=False,
                             EnableIntSerdesNWLoopback=False):
    """POST a new DWDMModuleClntIntf; ints/bools are coerced before serialization."""
    obj = dict(
        ClntIntfId=int(ClntIntfId),
        ModuleId=int(ModuleId),
        NwLaneTributaryToClntIntfMap=int(NwLaneTributaryToClntIntfMap),
        HostTxEqDfe=int(HostTxEqDfe),
        HostRxSerializerTap1Gain=int(HostRxSerializerTap1Gain),
        RxPRBSPattern=RxPRBSPattern,
        HostRxSerializerTap2Delay=int(HostRxSerializerTap2Delay),
        HostRxSerializerTap2Gain=int(HostRxSerializerTap2Gain),
        HostRxSerializerTap0Delay=int(HostRxSerializerTap0Delay),
        HostTxEqCtle=int(HostTxEqCtle),
        TxPRBSPattern=TxPRBSPattern,
        HostTxEqLfCtle=int(HostTxEqLfCtle),
        AdminState=AdminState,
        RXFECDecDisable=bool(RXFECDecDisable),
        EnableTxPRBSChecker=bool(EnableTxPRBSChecker),
        EnableHostLoopback=bool(EnableHostLoopback),
        HostRxSerializerTap0Gain=int(HostRxSerializerTap0Gain),
        TXFECDecDisable=bool(TXFECDecDisable),
        EnableRxPRBS=bool(EnableRxPRBS),
        EnableIntSerdesNWLoopback=bool(EnableIntSerdesNWLoopback),
    )
    reqUrl = self.cfgUrlBase + 'DWDMModuleClntIntf'
    kwargs = {'data': json.dumps(obj), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.post(reqUrl, **kwargs)
def deleteDWDMModuleClntIntf(self,
                             ClntIntfId,
                             ModuleId):
    """DELETE the DWDMModuleClntIntf keyed by ClntIntfId and ModuleId."""
    obj = {'ClntIntfId': ClntIntfId, 'ModuleId': ModuleId}
    reqUrl = self.cfgUrlBase + 'DWDMModuleClntIntf'
    kwargs = {'data': json.dumps(obj), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.delete(reqUrl, **kwargs)
def deleteDWDMModuleClntIntfById(self, objectId ):
    """DELETE the DWDMModuleClntIntf identified by objectId.

    Fix: the authenticated branch previously omitted auth=(user, passwd) and
    verify=False, making it byte-identical to the unauthenticated branch —
    inconsistent with every other authenticated request in this client.
    """
    reqUrl = self.cfgUrlBase + 'DWDMModuleClntIntf' + "/%s" % (objectId)
    if self.authenticate == True:
        # Credentials now sent, matching the file's authenticated-request pattern.
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updateDWDMModuleClntIntf(self,
                             ClntIntfId,
                             ModuleId,
                             NwLaneTributaryToClntIntfMap = None,
                             HostTxEqDfe = None,
                             HostRxSerializerTap1Gain = None,
                             RxPRBSPattern = None,
                             HostRxSerializerTap2Delay = None,
                             HostRxSerializerTap2Gain = None,
                             HostRxSerializerTap0Delay = None,
                             HostTxEqCtle = None,
                             TxPRBSPattern = None,
                             HostTxEqLfCtle = None,
                             AdminState = None,
                             RXFECDecDisable = None,
                             EnableTxPRBSChecker = None,
                             EnableHostLoopback = None,
                             HostRxSerializerTap0Gain = None,
                             TXFECDecDisable = None,
                             EnableRxPRBS = None,
                             EnableIntSerdesNWLoopback = None):
    """PATCH DWDMModuleClntIntf keyed by ClntIntfId/ModuleId; only non-None fields sent.

    Each field is paired with its coercion (int/bool/identity) so the wire
    types match what the create call produces.
    """
    asis = lambda v: v
    fields = (('ClntIntfId', ClntIntfId, int),
              ('ModuleId', ModuleId, int),
              ('NwLaneTributaryToClntIntfMap', NwLaneTributaryToClntIntfMap, int),
              ('HostTxEqDfe', HostTxEqDfe, int),
              ('HostRxSerializerTap1Gain', HostRxSerializerTap1Gain, int),
              ('RxPRBSPattern', RxPRBSPattern, asis),
              ('HostRxSerializerTap2Delay', HostRxSerializerTap2Delay, int),
              ('HostRxSerializerTap2Gain', HostRxSerializerTap2Gain, int),
              ('HostRxSerializerTap0Delay', HostRxSerializerTap0Delay, int),
              ('HostTxEqCtle', HostTxEqCtle, int),
              ('TxPRBSPattern', TxPRBSPattern, asis),
              ('HostTxEqLfCtle', HostTxEqLfCtle, int),
              ('AdminState', AdminState, asis),
              ('RXFECDecDisable', RXFECDecDisable, bool),
              ('EnableTxPRBSChecker', EnableTxPRBSChecker, bool),
              ('EnableHostLoopback', EnableHostLoopback, bool),
              ('HostRxSerializerTap0Gain', HostRxSerializerTap0Gain, int),
              ('TXFECDecDisable', TXFECDecDisable, bool),
              ('EnableRxPRBS', EnableRxPRBS, bool),
              ('EnableIntSerdesNWLoopback', EnableIntSerdesNWLoopback, bool))
    obj = {name: conv(val) for name, val, conv in fields if val is not None}
    reqUrl = self.cfgUrlBase + 'DWDMModuleClntIntf'
    kwargs = {'data': json.dumps(obj), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.patch(reqUrl, **kwargs)
def updateDWDMModuleClntIntfById(self,
                                 objectId,
                                 NwLaneTributaryToClntIntfMap = None,
                                 HostTxEqDfe = None,
                                 HostRxSerializerTap1Gain = None,
                                 RxPRBSPattern = None,
                                 HostRxSerializerTap2Delay = None,
                                 HostRxSerializerTap2Gain = None,
                                 HostRxSerializerTap0Delay = None,
                                 HostTxEqCtle = None,
                                 TxPRBSPattern = None,
                                 HostTxEqLfCtle = None,
                                 AdminState = None,
                                 RXFECDecDisable = None,
                                 EnableTxPRBSChecker = None,
                                 EnableHostLoopback = None,
                                 HostRxSerializerTap0Gain = None,
                                 TXFECDecDisable = None,
                                 EnableRxPRBS = None,
                                 EnableIntSerdesNWLoopback = None):
    """PATCH the DWDMModuleClntIntf identified by objectId; only non-None fields sent (values pass through unmodified)."""
    candidates = (('NwLaneTributaryToClntIntfMap', NwLaneTributaryToClntIntfMap),
                  ('HostTxEqDfe', HostTxEqDfe),
                  ('HostRxSerializerTap1Gain', HostRxSerializerTap1Gain),
                  ('RxPRBSPattern', RxPRBSPattern),
                  ('HostRxSerializerTap2Delay', HostRxSerializerTap2Delay),
                  ('HostRxSerializerTap2Gain', HostRxSerializerTap2Gain),
                  ('HostRxSerializerTap0Delay', HostRxSerializerTap0Delay),
                  ('HostTxEqCtle', HostTxEqCtle),
                  ('TxPRBSPattern', TxPRBSPattern),
                  ('HostTxEqLfCtle', HostTxEqLfCtle),
                  ('AdminState', AdminState),
                  ('RXFECDecDisable', RXFECDecDisable),
                  ('EnableTxPRBSChecker', EnableTxPRBSChecker),
                  ('EnableHostLoopback', EnableHostLoopback),
                  ('HostRxSerializerTap0Gain', HostRxSerializerTap0Gain),
                  ('TXFECDecDisable', TXFECDecDisable),
                  ('EnableRxPRBS', EnableRxPRBS),
                  ('EnableIntSerdesNWLoopback', EnableIntSerdesNWLoopback))
    obj = {name: val for name, val in candidates if val is not None}
    reqUrl = self.cfgUrlBase + 'DWDMModuleClntIntf' + "/%s" % (objectId)
    kwargs = {'data': json.dumps(obj), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.patch(reqUrl, **kwargs)
def patchUpdateDWDMModuleClntIntf(self,
                                  ClntIntfId,
                                  ModuleId,
                                  op,
                                  path,
                                  value,):
    """Send a JSON-patch style update for the DWDMModuleClntIntf keyed by ClntIntfId/ModuleId."""
    obj = {'ClntIntfId': ClntIntfId,
           'ModuleId': ModuleId,
           'patch': [{'op': op, 'path': path, 'value': value}]}
    reqUrl = self.cfgUrlBase + 'DWDMModuleClntIntf'
    kwargs = {'data': json.dumps(obj), 'headers': patchheaders, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.patch(reqUrl, **kwargs)
def getDWDMModuleClntIntf(self,
                          ClntIntfId,
                          ModuleId):
    """GET the DWDMModuleClntIntf keyed by ClntIntfId/ModuleId (both coerced to int)."""
    obj = {'ClntIntfId': int(ClntIntfId), 'ModuleId': int(ModuleId)}
    reqUrl = self.cfgUrlBase + 'DWDMModuleClntIntf'
    kwargs = {'data': json.dumps(obj), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.get(reqUrl, **kwargs)
def getDWDMModuleClntIntfById(self, objectId ):
    """GET a single DWDMModuleClntIntf object by its object id."""
    reqUrl = self.cfgUrlBase + 'DWDMModuleClntIntf' + "/%s" % (objectId)
    kwargs = {'data': None, 'headers': headers, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.get(reqUrl, **kwargs)
def getAllDWDMModuleClntIntfs(self):
    """Return every DWDMModuleClntIntf object via the generic object lister."""
    return self.getObjects('DWDMModuleClntIntf', self.cfgUrlBase)
def getDistributedRelayState(self,
                             DrniName):
    """GET the DistributedRelay state object keyed by DrniName."""
    obj = {'DrniName': DrniName}
    reqUrl = self.stateUrlBase + 'DistributedRelay'
    kwargs = {'data': json.dumps(obj), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.get(reqUrl, **kwargs)
def getDistributedRelayStateById(self, objectId ):
    """GET a single DistributedRelay state object by its object id."""
    reqUrl = self.stateUrlBase + 'DistributedRelay' + "/%s" % (objectId)
    kwargs = {'data': None, 'headers': headers, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.get(reqUrl, **kwargs)
def getAllDistributedRelayStates(self):
    """Return every DistributedRelay state object via the generic object lister."""
    return self.getObjects('DistributedRelay', self.stateUrlBase)
def getEthernetPMState(self,
                       IntfRef,
                       Resource):
    """GET the EthernetPM state object keyed by IntfRef and Resource."""
    obj = {'IntfRef': IntfRef, 'Resource': Resource}
    reqUrl = self.stateUrlBase + 'EthernetPM'
    kwargs = {'data': json.dumps(obj), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.get(reqUrl, **kwargs)
def getEthernetPMStateById(self, objectId ):
    """GET a single EthernetPM state object by its object id."""
    reqUrl = self.stateUrlBase + 'EthernetPM' + "/%s" % (objectId)
    kwargs = {'data': None, 'headers': headers, 'timeout': self.timeout}
    if self.authenticate:
        kwargs['auth'] = (self.user, self.passwd)
        kwargs['verify'] = False
    return requests.get(reqUrl, **kwargs)
def getAllEthernetPMStates(self):
    """Return every EthernetPM state object via the generic object lister."""
    return self.getObjects('EthernetPM', self.stateUrlBase)
"""
.. automethod :: createBGPv4Neighbor(self,
:param string IntfRef : Interface of the BGP neighbor Interface of the BGP neighbor
:param string NeighborAddress : Address of the BGP neighbor Address of the BGP neighbor
:param bool BfdEnable : Enable/Disable BFD for the BGP neighbor Enable/Disable BFD for the BGP neighbor
:param string PeerGroup : Peer group of the BGP neighbor Peer group of the BGP neighbor
:param uint8 MultiHopTTL : TTL for multi hop BGP neighbor TTL for multi hop BGP neighbor
:param string LocalAS : Local AS of the BGP neighbor Local AS of the BGP neighbor
:param uint32 KeepaliveTime : Keep alive time for the BGP neighbor Keep alive time for the BGP neighbor
:param bool AddPathsRx : Receive additional paths from BGP neighbor Receive additional paths from BGP neighbor
:param string UpdateSource : Source IP to connect to the BGP neighbor Source IP to connect to the BGP neighbor
:param bool RouteReflectorClient : Set/Clear BGP neighbor as a route reflector client Set/Clear BGP neighbor as a route reflector client
:param uint8 MaxPrefixesRestartTimer : Time in seconds to wait before we start BGP peer session when we receive max prefixes Time in seconds to wait before we start BGP peer session when we receive max prefixes
:param string Description : Description of the BGP neighbor Description of the BGP neighbor
:param bool MultiHopEnable : Enable/Disable multi hop for BGP neighbor Enable/Disable multi hop for BGP neighbor
:param string AuthPassword : Password to connect to the BGP neighbor Password to connect to the BGP neighbor
:param uint32 RouteReflectorClusterId : Cluster Id of the internal BGP neighbor route reflector client Cluster Id of the internal BGP neighbor route reflector client
:param string AdjRIBOutFilter : Policy that is applied for Adj-RIB-Out prefix filtering Policy that is applied for Adj-RIB-Out prefix filtering
:param bool MaxPrefixesDisconnect : Disconnect the BGP peer session when we receive the max prefixes from the neighbor Disconnect the BGP peer session when we receive the max prefixes from the neighbor
:param string PeerAS : Peer AS of the BGP neighbor Peer AS of the BGP neighbor
:param uint8 AddPathsMaxTx : Max number of additional paths that can be transmitted to BGP neighbor Max number of additional paths that can be transmitted to BGP neighbor
:param string AdjRIBInFilter : Policy that is applied for Adj-RIB-In prefix filtering Policy that is applied for Adj-RIB-In prefix filtering
:param uint32 MaxPrefixes : Maximum number of prefixes that can be received from the BGP neighbor Maximum number of prefixes that can be received from the BGP neighbor
:param uint8 MaxPrefixesThresholdPct : The percentage of maximum prefixes before we start logging The percentage of maximum prefixes before we start logging
:param string BfdSessionParam : Bfd session param name to be applied Bfd session param name to be applied
:param bool NextHopSelf : Use neighbor source IP as the next hop for IBGP neighbors Use neighbor source IP as the next hop for IBGP neighbors
:param bool Disabled : Enable/Disable the BGP neighbor Enable/Disable the BGP neighbor
:param uint32 HoldTime : Hold time for the BGP neighbor Hold time for the BGP neighbor
:param uint32 ConnectRetryTime : Connect retry time to connect to BGP neighbor after disconnect Connect retry time to connect to BGP neighbor after disconnect
"""
def createBGPv4Neighbor(self,
                        IntfRef,
                        NeighborAddress,
                        BfdEnable=False,
                        PeerGroup='',
                        MultiHopTTL=0,
                        LocalAS='',
                        KeepaliveTime=0,
                        AddPathsRx=False,
                        UpdateSource='',
                        RouteReflectorClient=False,
                        MaxPrefixesRestartTimer=0,
                        Description='',
                        MultiHopEnable=False,
                        AuthPassword='',
                        RouteReflectorClusterId=0,
                        AdjRIBOutFilter='',
                        MaxPrefixesDisconnect=False,
                        PeerAS='',
                        AddPathsMaxTx=0,
                        AdjRIBInFilter='',
                        MaxPrefixes=0,
                        MaxPrefixesThresholdPct=80,
                        BfdSessionParam='default',
                        NextHopSelf=False,
                        Disabled=False,
                        HoldTime=0,
                        ConnectRetryTime=0):
    """Create a BGPv4Neighbor config object (HTTP POST).

    Boolean fields are normalised to True/False and numeric fields are
    coerced with int() before JSON serialisation, per the REST schema.
    Returns the raw requests.Response object.
    """
    payload = {
        'IntfRef': IntfRef,
        'NeighborAddress': NeighborAddress,
        'BfdEnable': bool(BfdEnable),
        'PeerGroup': PeerGroup,
        'MultiHopTTL': int(MultiHopTTL),
        'LocalAS': LocalAS,
        'KeepaliveTime': int(KeepaliveTime),
        'AddPathsRx': bool(AddPathsRx),
        'UpdateSource': UpdateSource,
        'RouteReflectorClient': bool(RouteReflectorClient),
        'MaxPrefixesRestartTimer': int(MaxPrefixesRestartTimer),
        'Description': Description,
        'MultiHopEnable': bool(MultiHopEnable),
        'AuthPassword': AuthPassword,
        'RouteReflectorClusterId': int(RouteReflectorClusterId),
        'AdjRIBOutFilter': AdjRIBOutFilter,
        'MaxPrefixesDisconnect': bool(MaxPrefixesDisconnect),
        'PeerAS': PeerAS,
        'AddPathsMaxTx': int(AddPathsMaxTx),
        'AdjRIBInFilter': AdjRIBInFilter,
        'MaxPrefixes': int(MaxPrefixes),
        'MaxPrefixesThresholdPct': int(MaxPrefixesThresholdPct),
        'BfdSessionParam': BfdSessionParam,
        'NextHopSelf': bool(NextHopSelf),
        'Disabled': bool(Disabled),
        'HoldTime': int(HoldTime),
        'ConnectRetryTime': int(ConnectRetryTime),
    }
    url = self.cfgUrlBase + 'BGPv4Neighbor'
    if self.authenticate == True:
        return requests.post(url, data=json.dumps(payload), headers=headers,
                             timeout=self.timeout,
                             auth=(self.user, self.passwd), verify=False)
    return requests.post(url, data=json.dumps(payload), headers=headers,
                         timeout=self.timeout)
def deleteBGPv4Neighbor(self, IntfRef, NeighborAddress):
    """Delete the BGPv4Neighbor identified by its keys (HTTP DELETE).

    Returns the raw requests.Response object.
    """
    body = json.dumps({'IntfRef': IntfRef, 'NeighborAddress': NeighborAddress})
    url = self.cfgUrlBase + 'BGPv4Neighbor'
    if self.authenticate == True:
        return requests.delete(url, data=body, headers=headers,
                               timeout=self.timeout,
                               auth=(self.user, self.passwd), verify=False)
    return requests.delete(url, data=body, headers=headers,
                           timeout=self.timeout)
def deleteBGPv4NeighborById(self, objectId):
    """Delete a BGPv4Neighbor config object by its object id (HTTP DELETE).

    Bug fix: the authenticated branch previously sent no credentials (it was
    identical to the unauthenticated branch), so the delete was rejected by
    servers requiring auth.  It now passes basic auth and verify=False,
    matching every other authenticated request in this client.
    Returns the raw requests.Response object.
    """
    reqUrl = self.cfgUrlBase + 'BGPv4Neighbor' + "/%s" % (objectId)
    if self.authenticate == True:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updateBGPv4Neighbor(self,
                        IntfRef,
                        NeighborAddress,
                        BfdEnable = None,
                        PeerGroup = None,
                        MultiHopTTL = None,
                        LocalAS = None,
                        KeepaliveTime = None,
                        AddPathsRx = None,
                        UpdateSource = None,
                        RouteReflectorClient = None,
                        MaxPrefixesRestartTimer = None,
                        Description = None,
                        MultiHopEnable = None,
                        AuthPassword = None,
                        RouteReflectorClusterId = None,
                        AdjRIBOutFilter = None,
                        MaxPrefixesDisconnect = None,
                        PeerAS = None,
                        AddPathsMaxTx = None,
                        AdjRIBInFilter = None,
                        MaxPrefixes = None,
                        MaxPrefixesThresholdPct = None,
                        BfdSessionParam = None,
                        NextHopSelf = None,
                        Disabled = None,
                        HoldTime = None,
                        ConnectRetryTime = None):
    """Update a BGPv4Neighbor keyed by IntfRef/NeighborAddress (HTTP PATCH).

    Only fields that are not None are included in the request body; ints
    are coerced with int() and flags normalised to True/False, as the REST
    schema expects.  Returns the raw requests.Response object.
    """
    # (field name, supplied value, converter or None for pass-through)
    specs = (
        ('IntfRef', IntfRef, None),
        ('NeighborAddress', NeighborAddress, None),
        ('BfdEnable', BfdEnable, bool),
        ('PeerGroup', PeerGroup, None),
        ('MultiHopTTL', MultiHopTTL, int),
        ('LocalAS', LocalAS, None),
        ('KeepaliveTime', KeepaliveTime, int),
        ('AddPathsRx', AddPathsRx, bool),
        ('UpdateSource', UpdateSource, None),
        ('RouteReflectorClient', RouteReflectorClient, bool),
        ('MaxPrefixesRestartTimer', MaxPrefixesRestartTimer, int),
        ('Description', Description, None),
        ('MultiHopEnable', MultiHopEnable, bool),
        ('AuthPassword', AuthPassword, None),
        ('RouteReflectorClusterId', RouteReflectorClusterId, int),
        ('AdjRIBOutFilter', AdjRIBOutFilter, None),
        ('MaxPrefixesDisconnect', MaxPrefixesDisconnect, bool),
        ('PeerAS', PeerAS, None),
        ('AddPathsMaxTx', AddPathsMaxTx, int),
        ('AdjRIBInFilter', AdjRIBInFilter, None),
        ('MaxPrefixes', MaxPrefixes, int),
        ('MaxPrefixesThresholdPct', MaxPrefixesThresholdPct, int),
        ('BfdSessionParam', BfdSessionParam, None),
        ('NextHopSelf', NextHopSelf, bool),
        ('Disabled', Disabled, bool),
        ('HoldTime', HoldTime, int),
        ('ConnectRetryTime', ConnectRetryTime, int),
    )
    body = {}
    for key, value, conv in specs:
        if value is not None:
            body[key] = conv(value) if conv else value
    url = self.cfgUrlBase + 'BGPv4Neighbor'
    if self.authenticate == True:
        return requests.patch(url, data=json.dumps(body), headers=headers,
                              timeout=self.timeout,
                              auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, data=json.dumps(body), headers=headers,
                          timeout=self.timeout)
def updateBGPv4NeighborById(self,
                            objectId,
                            BfdEnable = None,
                            PeerGroup = None,
                            MultiHopTTL = None,
                            LocalAS = None,
                            KeepaliveTime = None,
                            AddPathsRx = None,
                            UpdateSource = None,
                            RouteReflectorClient = None,
                            MaxPrefixesRestartTimer = None,
                            Description = None,
                            MultiHopEnable = None,
                            AuthPassword = None,
                            RouteReflectorClusterId = None,
                            AdjRIBOutFilter = None,
                            MaxPrefixesDisconnect = None,
                            PeerAS = None,
                            AddPathsMaxTx = None,
                            AdjRIBInFilter = None,
                            MaxPrefixes = None,
                            MaxPrefixesThresholdPct = None,
                            BfdSessionParam = None,
                            NextHopSelf = None,
                            Disabled = None,
                            HoldTime = None,
                            ConnectRetryTime = None):
    """Update a BGPv4Neighbor addressed by object id (HTTP PATCH).

    Values are forwarded verbatim; only non-None fields are sent.
    Returns the raw requests.Response object.
    """
    candidates = (
        ('BfdEnable', BfdEnable),
        ('PeerGroup', PeerGroup),
        ('MultiHopTTL', MultiHopTTL),
        ('LocalAS', LocalAS),
        ('KeepaliveTime', KeepaliveTime),
        ('AddPathsRx', AddPathsRx),
        ('UpdateSource', UpdateSource),
        ('RouteReflectorClient', RouteReflectorClient),
        ('MaxPrefixesRestartTimer', MaxPrefixesRestartTimer),
        ('Description', Description),
        ('MultiHopEnable', MultiHopEnable),
        ('AuthPassword', AuthPassword),
        ('RouteReflectorClusterId', RouteReflectorClusterId),
        ('AdjRIBOutFilter', AdjRIBOutFilter),
        ('MaxPrefixesDisconnect', MaxPrefixesDisconnect),
        ('PeerAS', PeerAS),
        ('AddPathsMaxTx', AddPathsMaxTx),
        ('AdjRIBInFilter', AdjRIBInFilter),
        ('MaxPrefixes', MaxPrefixes),
        ('MaxPrefixesThresholdPct', MaxPrefixesThresholdPct),
        ('BfdSessionParam', BfdSessionParam),
        ('NextHopSelf', NextHopSelf),
        ('Disabled', Disabled),
        ('HoldTime', HoldTime),
        ('ConnectRetryTime', ConnectRetryTime),
    )
    body = {key: value for key, value in candidates if value is not None}
    url = self.cfgUrlBase + 'BGPv4Neighbor' + "/%s" % (objectId)
    if self.authenticate == True:
        return requests.patch(url, data=json.dumps(body), headers=headers,
                              timeout=self.timeout,
                              auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, data=json.dumps(body), headers=headers,
                          timeout=self.timeout)
def patchUpdateBGPv4Neighbor(self, IntfRef, NeighborAddress, op, path, value):
    """Apply a single JSON-patch style operation to a BGPv4Neighbor.

    The op/path/value triple is wrapped in a one-element 'patch' list and
    sent with the patch-specific headers.  Returns the raw requests.Response.
    """
    body = {
        'IntfRef': IntfRef,
        'NeighborAddress': NeighborAddress,
        'patch': [{'op': op, 'path': path, 'value': value}],
    }
    url = self.cfgUrlBase + 'BGPv4Neighbor'
    if self.authenticate == True:
        return requests.patch(url, data=json.dumps(body), headers=patchheaders,
                              timeout=self.timeout,
                              auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, data=json.dumps(body), headers=patchheaders,
                          timeout=self.timeout)
def getBGPv4Neighbor(self, IntfRef, NeighborAddress):
    """Fetch the BGPv4Neighbor config object keyed by IntfRef/NeighborAddress.

    Note: the key is sent as a JSON request body on a GET, as this REST API
    expects.  Returns the raw requests.Response object.
    """
    body = json.dumps({'IntfRef': IntfRef, 'NeighborAddress': NeighborAddress})
    url = self.cfgUrlBase + 'BGPv4Neighbor'
    if self.authenticate == True:
        return requests.get(url, data=body, headers=headers,
                            timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=body, headers=headers, timeout=self.timeout)
def getBGPv4NeighborById(self, objectId):
    """Fetch a BGPv4Neighbor config object by its object id (HTTP GET)."""
    url = self.cfgUrlBase + 'BGPv4Neighbor' + "/%s" % (objectId)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers,
                            timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllBGPv4Neighbors(self):
    """Return all BGPv4Neighbor config objects via the generic object reader."""
    return self.getObjects('BGPv4Neighbor', self.cfgUrlBase)
"""
.. automethod :: executeSaveConfig(self,
:param string FileName : File name for the saved config
"""
def executeSaveConfig(self, FileName='startup-config'):
    """Trigger the SaveConfig action on the switch (HTTP POST, no timeout).

    Returns the raw requests.Response object.
    """
    body = json.dumps({'FileName': FileName})
    url = self.actionUrlBase + 'SaveConfig'
    if self.authenticate == True:
        return requests.post(url, data=body, headers=headers,
                             auth=(self.user, self.passwd), verify=False)
    return requests.post(url, data=body, headers=headers)
"""
.. automethod :: executeArpRefreshByIfName(self,
:param string IfName : All the Arp learned on the given L3 interface will be re-learned
"""
def executeArpRefreshByIfName(self, IfName):
    """Trigger the ArpRefreshByIfName action for one L3 interface.

    Returns the raw requests.Response object.
    """
    body = json.dumps({'IfName': IfName})
    url = self.actionUrlBase + 'ArpRefreshByIfName'
    if self.authenticate == True:
        return requests.post(url, data=body, headers=headers,
                             auth=(self.user, self.passwd), verify=False)
    return requests.post(url, data=body, headers=headers)
"""
.. automethod :: executeResetBGPv6NeighborByIPAddr(self,
:param string IPAddr : IP address of the BGP IPv6 neighbor to restart
"""
def executeResetBGPv6NeighborByIPAddr(self, IPAddr):
    """Trigger the ResetBGPv6NeighborByIPAddr action for one neighbor.

    Returns the raw requests.Response object.
    """
    body = json.dumps({'IPAddr': IPAddr})
    url = self.actionUrlBase + 'ResetBGPv6NeighborByIPAddr'
    if self.authenticate == True:
        return requests.post(url, data=body, headers=headers,
                             auth=(self.user, self.passwd), verify=False)
    return requests.post(url, data=body, headers=headers)
def getCoppStatState(self, Protocol):
    """Fetch the CoppStat state object keyed by Protocol (HTTP GET)."""
    body = json.dumps({'Protocol': Protocol})
    url = self.stateUrlBase + 'CoppStat'
    if self.authenticate == True:
        return requests.get(url, data=body, headers=headers,
                            timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=body, headers=headers, timeout=self.timeout)
def getCoppStatStateById(self, objectId):
    """Fetch a CoppStat state object by its object id (HTTP GET)."""
    url = self.stateUrlBase + 'CoppStat' + "/%s" % (objectId)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers,
                            timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllCoppStatStates(self):
    """Return all CoppStat state objects via the generic object reader."""
    return self.getObjects('CoppStat', self.stateUrlBase)
"""
.. automethod :: createFMgrGlobal(self,
:param string Vrf : System Vrf
:param bool Enable : Enable Fault Manager
"""
def createFMgrGlobal(self, Enable):
    """Create the FMgrGlobal config object (HTTP POST).

    Vrf is fixed to 'default' by the API; Enable is normalised to a bool.
    Returns the raw requests.Response object.
    """
    body = json.dumps({'Vrf': 'default', 'Enable': bool(Enable)})
    url = self.cfgUrlBase + 'FMgrGlobal'
    if self.authenticate == True:
        return requests.post(url, data=body, headers=headers,
                             timeout=self.timeout,
                             auth=(self.user, self.passwd), verify=False)
    return requests.post(url, data=body, headers=headers, timeout=self.timeout)
def deleteFMgrGlobal(self, Vrf):
    """Delete the FMgrGlobal config object keyed by Vrf (HTTP DELETE)."""
    body = json.dumps({'Vrf': Vrf})
    url = self.cfgUrlBase + 'FMgrGlobal'
    if self.authenticate == True:
        return requests.delete(url, data=body, headers=headers,
                               timeout=self.timeout,
                               auth=(self.user, self.passwd), verify=False)
    return requests.delete(url, data=body, headers=headers, timeout=self.timeout)
def deleteFMgrGlobalById(self, objectId):
    """Delete a FMgrGlobal config object by its object id (HTTP DELETE).

    Bug fix: the authenticated branch previously sent no credentials (it was
    identical to the unauthenticated branch); it now passes basic auth and
    verify=False, matching the other authenticated requests in this client.
    Returns the raw requests.Response object.
    """
    reqUrl = self.cfgUrlBase + 'FMgrGlobal' + "/%s" % (objectId)
    if self.authenticate == True:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updateFMgrGlobal(self, Vrf, Enable = None):
    """Update the FMgrGlobal object keyed by Vrf (HTTP PATCH).

    Only non-None fields are sent; Enable is normalised to a bool.
    Returns the raw requests.Response object.
    """
    body = {}
    if Vrf is not None:
        body['Vrf'] = Vrf
    if Enable is not None:
        body['Enable'] = bool(Enable)
    url = self.cfgUrlBase + 'FMgrGlobal'
    if self.authenticate == True:
        return requests.patch(url, data=json.dumps(body), headers=headers,
                              timeout=self.timeout,
                              auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, data=json.dumps(body), headers=headers,
                          timeout=self.timeout)
def updateFMgrGlobalById(self, objectId, Enable = None):
    """Update a FMgrGlobal object addressed by object id (HTTP PATCH).

    Enable is forwarded verbatim when not None.
    Returns the raw requests.Response object.
    """
    body = {} if Enable is None else {'Enable': Enable}
    url = self.cfgUrlBase + 'FMgrGlobal' + "/%s" % (objectId)
    if self.authenticate == True:
        return requests.patch(url, data=json.dumps(body), headers=headers,
                              timeout=self.timeout,
                              auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, data=json.dumps(body), headers=headers,
                          timeout=self.timeout)
def patchUpdateFMgrGlobal(self, Vrf, op, path, value):
    """Apply a single JSON-patch style operation to FMgrGlobal.

    Returns the raw requests.Response object.
    """
    body = {'Vrf': Vrf, 'patch': [{'op': op, 'path': path, 'value': value}]}
    url = self.cfgUrlBase + 'FMgrGlobal'
    if self.authenticate == True:
        return requests.patch(url, data=json.dumps(body), headers=patchheaders,
                              timeout=self.timeout,
                              auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, data=json.dumps(body), headers=patchheaders,
                          timeout=self.timeout)
def getFMgrGlobal(self, Vrf):
    """Fetch the FMgrGlobal config object keyed by Vrf (HTTP GET)."""
    body = json.dumps({'Vrf': Vrf})
    url = self.cfgUrlBase + 'FMgrGlobal'
    if self.authenticate == True:
        return requests.get(url, data=body, headers=headers,
                            timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=body, headers=headers, timeout=self.timeout)
def getFMgrGlobalById(self, objectId):
    """Fetch a FMgrGlobal config object by its object id (HTTP GET)."""
    url = self.cfgUrlBase + 'FMgrGlobal' + "/%s" % (objectId)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers,
                            timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllFMgrGlobals(self):
    """Return all FMgrGlobal config objects via the generic object reader."""
    return self.getObjects('FMgrGlobal', self.cfgUrlBase)
def updateNotifierEnable(self, Vrf, AlarmEnable = None, FaultEnable = None, EventEnable = None):
    """Update the NotifierEnable object keyed by Vrf (HTTP PATCH).

    Only non-None fields are sent; the three flags are normalised to bools.
    Returns the raw requests.Response object.
    """
    body = {}
    if Vrf is not None:
        body['Vrf'] = Vrf
    for key, flag in (('AlarmEnable', AlarmEnable),
                      ('FaultEnable', FaultEnable),
                      ('EventEnable', EventEnable)):
        if flag is not None:
            body[key] = bool(flag)
    url = self.cfgUrlBase + 'NotifierEnable'
    if self.authenticate == True:
        return requests.patch(url, data=json.dumps(body), headers=headers,
                              timeout=self.timeout,
                              auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, data=json.dumps(body), headers=headers,
                          timeout=self.timeout)
def updateNotifierEnableById(self, objectId, AlarmEnable = None, FaultEnable = None, EventEnable = None):
    """Update a NotifierEnable object addressed by object id (HTTP PATCH).

    Flags are forwarded verbatim; only non-None fields are sent.
    Returns the raw requests.Response object.
    """
    candidates = (('AlarmEnable', AlarmEnable),
                  ('FaultEnable', FaultEnable),
                  ('EventEnable', EventEnable))
    body = {key: flag for key, flag in candidates if flag is not None}
    url = self.cfgUrlBase + 'NotifierEnable' + "/%s" % (objectId)
    if self.authenticate == True:
        return requests.patch(url, data=json.dumps(body), headers=headers,
                              timeout=self.timeout,
                              auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, data=json.dumps(body), headers=headers,
                          timeout=self.timeout)
def patchUpdateNotifierEnable(self, Vrf, op, path, value):
    """Apply a single JSON-patch style operation to NotifierEnable.

    Returns the raw requests.Response object.
    """
    body = {'Vrf': Vrf, 'patch': [{'op': op, 'path': path, 'value': value}]}
    url = self.cfgUrlBase + 'NotifierEnable'
    if self.authenticate == True:
        return requests.patch(url, data=json.dumps(body), headers=patchheaders,
                              timeout=self.timeout,
                              auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, data=json.dumps(body), headers=patchheaders,
                          timeout=self.timeout)
def getNotifierEnable(self, Vrf):
    """Fetch the NotifierEnable config object keyed by Vrf (HTTP GET)."""
    body = json.dumps({'Vrf': Vrf})
    url = self.cfgUrlBase + 'NotifierEnable'
    if self.authenticate == True:
        return requests.get(url, data=body, headers=headers,
                            timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=body, headers=headers, timeout=self.timeout)
def getNotifierEnableById(self, objectId):
    """Fetch a NotifierEnable config object by its object id (HTTP GET)."""
    url = self.cfgUrlBase + 'NotifierEnable' + "/%s" % (objectId)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers,
                            timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllNotifierEnables(self):
    """Return all NotifierEnable config objects via the generic object reader."""
    return self.getObjects('NotifierEnable', self.cfgUrlBase)
def updateDHCPRelayGlobal(self, Vrf, HopCountLimit = None, Enable = None):
    """Update the DHCPRelayGlobal object keyed by Vrf (HTTP PATCH).

    Only non-None fields are sent; HopCountLimit is coerced with int() and
    Enable normalised to a bool.  Returns the raw requests.Response object.
    """
    body = {}
    if Vrf is not None:
        body['Vrf'] = Vrf
    if HopCountLimit is not None:
        body['HopCountLimit'] = int(HopCountLimit)
    if Enable is not None:
        body['Enable'] = bool(Enable)
    url = self.cfgUrlBase + 'DHCPRelayGlobal'
    if self.authenticate == True:
        return requests.patch(url, data=json.dumps(body), headers=headers,
                              timeout=self.timeout,
                              auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, data=json.dumps(body), headers=headers,
                          timeout=self.timeout)
def updateDHCPRelayGlobalById(self, objectId, HopCountLimit = None, Enable = None):
    """Update a DHCPRelayGlobal object addressed by object id (HTTP PATCH).

    Values are forwarded verbatim; only non-None fields are sent.
    Returns the raw requests.Response object.
    """
    candidates = (('HopCountLimit', HopCountLimit), ('Enable', Enable))
    body = {key: value for key, value in candidates if value is not None}
    url = self.cfgUrlBase + 'DHCPRelayGlobal' + "/%s" % (objectId)
    if self.authenticate == True:
        return requests.patch(url, data=json.dumps(body), headers=headers,
                              timeout=self.timeout,
                              auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, data=json.dumps(body), headers=headers,
                          timeout=self.timeout)
def patchUpdateDHCPRelayGlobal(self, Vrf, op, path, value):
    """Apply a single JSON-patch style operation to DHCPRelayGlobal.

    Returns the raw requests.Response object.
    """
    body = {'Vrf': Vrf, 'patch': [{'op': op, 'path': path, 'value': value}]}
    url = self.cfgUrlBase + 'DHCPRelayGlobal'
    if self.authenticate == True:
        return requests.patch(url, data=json.dumps(body), headers=patchheaders,
                              timeout=self.timeout,
                              auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, data=json.dumps(body), headers=patchheaders,
                          timeout=self.timeout)
def getDHCPRelayGlobal(self, Vrf):
    """Fetch the DHCPRelayGlobal config object keyed by Vrf (HTTP GET)."""
    body = json.dumps({'Vrf': Vrf})
    url = self.cfgUrlBase + 'DHCPRelayGlobal'
    if self.authenticate == True:
        return requests.get(url, data=body, headers=headers,
                            timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=body, headers=headers, timeout=self.timeout)
def getDHCPRelayGlobalById(self, objectId):
    """Fetch a DHCPRelayGlobal config object by its object id (HTTP GET)."""
    url = self.cfgUrlBase + 'DHCPRelayGlobal' + "/%s" % (objectId)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers,
                            timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllDHCPRelayGlobals(self):
    """Return all DHCPRelayGlobal config objects via the generic object reader."""
    return self.getObjects('DHCPRelayGlobal', self.cfgUrlBase)
"""
.. automethod :: executeDWDMModuleSetBootPartition(self,
:param uint8 ModuleId : DWDM Module identifier
:param string Partition : Active/StandBy
"""
def executeDWDMModuleSetBootPartition(self, ModuleId, Partition):
    """Trigger the DWDMModuleSetBootPartition action (HTTP POST, no timeout).

    ModuleId is coerced with int() per the REST schema.
    Returns the raw requests.Response object.
    """
    body = json.dumps({'ModuleId': int(ModuleId), 'Partition': Partition})
    url = self.actionUrlBase + 'DWDMModuleSetBootPartition'
    if self.authenticate == True:
        return requests.post(url, data=body, headers=headers,
                             auth=(self.user, self.passwd), verify=False)
    return requests.post(url, data=body, headers=headers)
"""
.. automethod :: createLaPortChannel(self,
:param string IntfRef : Id of the lag group
:param string IntfRefList : List of current member interfaces for the aggregate
:param uint16 MinLinks : Specifies the minimum number of member interfaces that must be active for the aggregate interface to be available
:param uint16 SystemPriority : System priority used by the node on this LAG interface. Lower value is higher priority for determining which node is the controlling system.
:param int32 Interval : Set the period between LACP messages -- uses the lacp-period-type enumeration.
:param int32 LagHash : The tx hashing algorithm used by the lag group
:param string AdminState : Convenient way to disable/enable a lag group. The behaviour should be such that all traffic should stop. LACP frames should continue to be processed.
:param string SystemIdMac : The MAC address portion of the node's System ID. This is combined with the system priority to construct the 8-octet system-id.
:param int32 LagType : Sets the type of LAG
:param int32 LacpMode : ACTIVE is to initiate the transmission of LACP packets. PASSIVE is to wait for peer to initiate the transmission of LACP packets.
"""
def createLaPortChannel(self,
                        IntfRef,
                        IntfRefList,
                        MinLinks=1,
                        SystemPriority=32768,
                        Interval=1,
                        LagHash=0,
                        AdminState='UP',
                        SystemIdMac='00-00-00-00-00-00',
                        LagType=0,
                        LacpMode=0):
    """Create a LaPortChannel (LAG) config object (HTTP POST).

    Numeric fields are coerced with int() per the REST schema.
    Returns the raw requests.Response object.
    """
    payload = {
        'IntfRef': IntfRef,
        'IntfRefList': IntfRefList,
        'MinLinks': int(MinLinks),
        'SystemPriority': int(SystemPriority),
        'Interval': int(Interval),
        'LagHash': int(LagHash),
        'AdminState': AdminState,
        'SystemIdMac': SystemIdMac,
        'LagType': int(LagType),
        'LacpMode': int(LacpMode),
    }
    url = self.cfgUrlBase + 'LaPortChannel'
    if self.authenticate == True:
        return requests.post(url, data=json.dumps(payload), headers=headers,
                             timeout=self.timeout,
                             auth=(self.user, self.passwd), verify=False)
    return requests.post(url, data=json.dumps(payload), headers=headers,
                         timeout=self.timeout)
def deleteLaPortChannel(self, IntfRef):
    """Delete the LaPortChannel identified by IntfRef (HTTP DELETE)."""
    body = json.dumps({'IntfRef': IntfRef})
    url = self.cfgUrlBase + 'LaPortChannel'
    if self.authenticate == True:
        return requests.delete(url, data=body, headers=headers,
                               timeout=self.timeout,
                               auth=(self.user, self.passwd), verify=False)
    return requests.delete(url, data=body, headers=headers, timeout=self.timeout)
def deleteLaPortChannelById(self, objectId):
    """Delete a LaPortChannel config object by its object id (HTTP DELETE).

    Bug fix: the authenticated branch previously sent no credentials (it was
    identical to the unauthenticated branch); it now passes basic auth and
    verify=False, matching the other authenticated requests in this client.
    Returns the raw requests.Response object.
    """
    reqUrl = self.cfgUrlBase + 'LaPortChannel' + "/%s" % (objectId)
    if self.authenticate == True:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updateLaPortChannel(self,
                        IntfRef,
                        IntfRefList = None,
                        MinLinks = None,
                        SystemPriority = None,
                        Interval = None,
                        LagHash = None,
                        AdminState = None,
                        SystemIdMac = None,
                        LagType = None,
                        LacpMode = None):
    """Update a LaPortChannel keyed by IntfRef (HTTP PATCH).

    Only non-None fields are sent; numeric fields are coerced with int().
    Returns the raw requests.Response object.
    """
    # (field name, supplied value, converter or None for pass-through)
    specs = (
        ('IntfRef', IntfRef, None),
        ('IntfRefList', IntfRefList, None),
        ('MinLinks', MinLinks, int),
        ('SystemPriority', SystemPriority, int),
        ('Interval', Interval, int),
        ('LagHash', LagHash, int),
        ('AdminState', AdminState, None),
        ('SystemIdMac', SystemIdMac, None),
        ('LagType', LagType, int),
        ('LacpMode', LacpMode, int),
    )
    body = {}
    for key, value, conv in specs:
        if value is not None:
            body[key] = conv(value) if conv else value
    url = self.cfgUrlBase + 'LaPortChannel'
    if self.authenticate == True:
        return requests.patch(url, data=json.dumps(body), headers=headers,
                              timeout=self.timeout,
                              auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, data=json.dumps(body), headers=headers,
                          timeout=self.timeout)
def updateLaPortChannelById(self,
                            objectId,
                            IntfRefList = None,
                            MinLinks = None,
                            SystemPriority = None,
                            Interval = None,
                            LagHash = None,
                            AdminState = None,
                            SystemIdMac = None,
                            LagType = None,
                            LacpMode = None):
    """Update a LaPortChannel addressed by object id (HTTP PATCH).

    Values are forwarded verbatim; only non-None fields are sent.
    Returns the raw requests.Response object.
    """
    candidates = (
        ('IntfRefList', IntfRefList),
        ('MinLinks', MinLinks),
        ('SystemPriority', SystemPriority),
        ('Interval', Interval),
        ('LagHash', LagHash),
        ('AdminState', AdminState),
        ('SystemIdMac', SystemIdMac),
        ('LagType', LagType),
        ('LacpMode', LacpMode),
    )
    body = {key: value for key, value in candidates if value is not None}
    url = self.cfgUrlBase + 'LaPortChannel' + "/%s" % (objectId)
    if self.authenticate == True:
        return requests.patch(url, data=json.dumps(body), headers=headers,
                              timeout=self.timeout,
                              auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, data=json.dumps(body), headers=headers,
                          timeout=self.timeout)
def patchUpdateLaPortChannel(self, IntfRef, op, path, value):
    """Apply a single JSON-patch style operation to a LaPortChannel.

    Returns the raw requests.Response object.
    """
    body = {'IntfRef': IntfRef, 'patch': [{'op': op, 'path': path, 'value': value}]}
    url = self.cfgUrlBase + 'LaPortChannel'
    if self.authenticate == True:
        return requests.patch(url, data=json.dumps(body), headers=patchheaders,
                              timeout=self.timeout,
                              auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, data=json.dumps(body), headers=patchheaders,
                          timeout=self.timeout)
def getLaPortChannel(self, IntfRef):
    """Fetch the LaPortChannel config object keyed by IntfRef (HTTP GET)."""
    body = json.dumps({'IntfRef': IntfRef})
    url = self.cfgUrlBase + 'LaPortChannel'
    if self.authenticate == True:
        return requests.get(url, data=body, headers=headers,
                            timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=body, headers=headers, timeout=self.timeout)
def getLaPortChannelById(self, objectId):
    """Fetch a LaPortChannel config object by its object id (HTTP GET)."""
    url = self.cfgUrlBase + 'LaPortChannel' + "/%s" % (objectId)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers,
                            timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllLaPortChannels(self):
    """Return all LaPortChannel config objects via the generic object reader."""
    return self.getObjects('LaPortChannel', self.cfgUrlBase)
"""
.. automethod :: executeResetConfig(self,
"""
def executeResetConfig(self):
    """Trigger the ResetConfig action (HTTP POST with an empty JSON body).

    Returns the raw requests.Response object.
    """
    url = self.actionUrlBase + 'ResetConfig'
    body = json.dumps({})
    if self.authenticate == True:
        return requests.post(url, data=body, headers=headers,
                             auth=(self.user, self.passwd), verify=False)
    return requests.post(url, data=body, headers=headers)
def getApiInfoState(self, Url):
    """Fetch the ApiInfo state object keyed by Url (HTTP GET)."""
    body = json.dumps({'Url': Url})
    url = self.stateUrlBase + 'ApiInfo'
    if self.authenticate == True:
        return requests.get(url, data=body, headers=headers,
                            timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=body, headers=headers, timeout=self.timeout)
def getApiInfoStateById(self, objectId):
    """Fetch an ApiInfo state object by its object id (HTTP GET)."""
    url = self.stateUrlBase + 'ApiInfo' + "/%s" % (objectId)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers,
                            timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllApiInfoStates(self):
    """Return all ApiInfo state objects via the generic object reader."""
    return self.getObjects('ApiInfo', self.stateUrlBase)
def getFanSensorState(self, Name):
    """Fetch the FanSensor state object keyed by Name (HTTP GET)."""
    body = json.dumps({'Name': Name})
    url = self.stateUrlBase + 'FanSensor'
    if self.authenticate == True:
        return requests.get(url, data=body, headers=headers,
                            timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=body, headers=headers, timeout=self.timeout)
def getFanSensorStateById(self, objectId):
    """Fetch a FanSensor state object by its object id (HTTP GET)."""
    url = self.stateUrlBase + 'FanSensor' + "/%s" % (objectId)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers,
                            timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllFanSensorStates(self):
    """Return all FanSensor state objects via the generic object reader."""
    return self.getObjects('FanSensor', self.stateUrlBase)
"""
.. automethod :: createBfdSessionParam(self,
:param string Name : Session parameters
:param uint32 RequiredMinRxInterval : Required minimum rx interval in ms
:param string AuthData : Authentication password
:param bool DemandEnabled : Enable or disable demand mode
:param uint32 AuthKeyId : Authentication key id
:param string AuthType : Authentication type
:param uint32 DesiredMinTxInterval : Desired minimum tx interval in ms
:param bool AuthenticationEnabled : Enable or disable authentication
:param uint32 RequiredMinEchoRxInterval : Required minimum echo rx interval in ms
:param uint32 LocalMultiplier : Detection multiplier
"""
def createBfdSessionParam(self,
                          Name,
                          RequiredMinRxInterval=1000,
                          AuthData='snaproute',
                          DemandEnabled=False,
                          AuthKeyId=1,
                          AuthType='simple',
                          DesiredMinTxInterval=1000,
                          AuthenticationEnabled=False,
                          RequiredMinEchoRxInterval=0,
                          LocalMultiplier=3):
    """Create a BfdSessionParam config object (HTTP POST).

    Numeric fields are coerced with int() and flags normalised to bools.
    Returns the raw requests.Response object.
    """
    payload = {
        'Name': Name,
        'RequiredMinRxInterval': int(RequiredMinRxInterval),
        'AuthData': AuthData,
        'DemandEnabled': bool(DemandEnabled),
        'AuthKeyId': int(AuthKeyId),
        'AuthType': AuthType,
        'DesiredMinTxInterval': int(DesiredMinTxInterval),
        'AuthenticationEnabled': bool(AuthenticationEnabled),
        'RequiredMinEchoRxInterval': int(RequiredMinEchoRxInterval),
        'LocalMultiplier': int(LocalMultiplier),
    }
    url = self.cfgUrlBase + 'BfdSessionParam'
    if self.authenticate == True:
        return requests.post(url, data=json.dumps(payload), headers=headers,
                             timeout=self.timeout,
                             auth=(self.user, self.passwd), verify=False)
    return requests.post(url, data=json.dumps(payload), headers=headers,
                         timeout=self.timeout)
def deleteBfdSessionParam(self, Name):
    """Delete the BfdSessionParam identified by Name (HTTP DELETE)."""
    body = json.dumps({'Name': Name})
    url = self.cfgUrlBase + 'BfdSessionParam'
    if self.authenticate == True:
        return requests.delete(url, data=body, headers=headers,
                               timeout=self.timeout,
                               auth=(self.user, self.passwd), verify=False)
    return requests.delete(url, data=body, headers=headers, timeout=self.timeout)
def deleteBfdSessionParamById(self, objectId):
    """Delete a BfdSessionParam config object by its object id (HTTP DELETE).

    Bug fix: the authenticated branch previously sent no credentials (it was
    identical to the unauthenticated branch); it now passes basic auth and
    verify=False, matching the other authenticated requests in this client.
    Returns the raw requests.Response object.
    """
    reqUrl = self.cfgUrlBase + 'BfdSessionParam' + "/%s" % (objectId)
    if self.authenticate == True:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updateBfdSessionParam(self,
Name,
RequiredMinRxInterval = None,
AuthData = None,
DemandEnabled = None,
AuthKeyId = None,
AuthType = None,
DesiredMinTxInterval = None,
AuthenticationEnabled = None,
RequiredMinEchoRxInterval = None,
LocalMultiplier = None):
obj = {}
if Name != None :
obj['Name'] = Name
if RequiredMinRxInterval != None :
obj['RequiredMinRxInterval'] = int(RequiredMinRxInterval)
if AuthData != None :
obj['AuthData'] = AuthData
if DemandEnabled != None :
obj['DemandEnabled'] = True if DemandEnabled else False
if AuthKeyId != None :
obj['AuthKeyId'] = int(AuthKeyId)
if AuthType != None :
obj['AuthType'] = AuthType
if DesiredMinTxInterval != None :
obj['DesiredMinTxInterval'] = int(DesiredMinTxInterval)
if AuthenticationEnabled != None :
obj['AuthenticationEnabled'] = True if AuthenticationEnabled else False
if RequiredMinEchoRxInterval != None :
obj['RequiredMinEchoRxInterval'] = int(RequiredMinEchoRxInterval)
if LocalMultiplier != None :
obj['LocalMultiplier'] = int(LocalMultiplier)
reqUrl = self.cfgUrlBase+'BfdSessionParam'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateBfdSessionParamById(self,
objectId,
RequiredMinRxInterval = None,
AuthData = None,
DemandEnabled = None,
AuthKeyId = None,
AuthType = None,
DesiredMinTxInterval = None,
AuthenticationEnabled = None,
RequiredMinEchoRxInterval = None,
LocalMultiplier = None):
obj = {}
if RequiredMinRxInterval != None:
obj['RequiredMinRxInterval'] = RequiredMinRxInterval
if AuthData != None:
obj['AuthData'] = AuthData
if DemandEnabled != None:
obj['DemandEnabled'] = DemandEnabled
if AuthKeyId != None:
obj['AuthKeyId'] = AuthKeyId
if AuthType != None:
obj['AuthType'] = AuthType
if DesiredMinTxInterval != None:
obj['DesiredMinTxInterval'] = DesiredMinTxInterval
if AuthenticationEnabled != None:
obj['AuthenticationEnabled'] = AuthenticationEnabled
if RequiredMinEchoRxInterval != None:
obj['RequiredMinEchoRxInterval'] = RequiredMinEchoRxInterval
if LocalMultiplier != None:
obj['LocalMultiplier'] = LocalMultiplier
reqUrl = self.cfgUrlBase+'BfdSessionParam'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdateBfdSessionParam(self,
Name,
op,
path,
value,):
obj = {}
obj['Name'] = Name
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'BfdSessionParam'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getBfdSessionParam(self,
Name):
obj = {
'Name' : Name,
}
reqUrl = self.cfgUrlBase + 'BfdSessionParam'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getBfdSessionParamById(self, objectId ):
reqUrl = self.cfgUrlBase + 'BfdSessionParam'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllBfdSessionParams(self):
        """Fetch every BfdSessionParam config object via the shared getObjects helper."""
        return self.getObjects('BfdSessionParam', self.cfgUrlBase)
def getConfigLogState(self,
SeqNum,
API,
Time):
obj = {
'SeqNum' : int(SeqNum),
'API' : API,
'Time' : Time,
}
reqUrl = self.stateUrlBase + 'ConfigLog'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getConfigLogStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'ConfigLog'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllConfigLogStates(self):
        """Fetch every ConfigLog state object via the shared getObjects helper."""
        return self.getObjects('ConfigLog', self.stateUrlBase)
"""
.. automethod :: createRedistributionPolicy(self,
:param string Source : Source Protocol for redistribution Source Protocol for redistribution
:param string Target : Target protocol for redistribution Target protocol for redistribution
:param string Policy : Policy to be applied from source to Target Policy to be applied from source to Target
"""
def createRedistributionPolicy(self,
Source,
Target,
Policy):
obj = {
'Source' : Source,
'Target' : Target,
'Policy' : Policy,
}
reqUrl = self.cfgUrlBase+'RedistributionPolicy'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteRedistributionPolicy(self,
Source,
Target):
obj = {
'Source' : Source,
'Target' : Target,
}
reqUrl = self.cfgUrlBase+'RedistributionPolicy'
if self.authenticate == True:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteRedistributionPolicyById(self, objectId ):
reqUrl = self.cfgUrlBase+'RedistributionPolicy'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
else:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
return r
def updateRedistributionPolicy(self,
Source,
Target,
Policy = None):
obj = {}
if Source != None :
obj['Source'] = Source
if Target != None :
obj['Target'] = Target
if Policy != None :
obj['Policy'] = Policy
reqUrl = self.cfgUrlBase+'RedistributionPolicy'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateRedistributionPolicyById(self,
objectId,
Policy = None):
obj = {}
if Policy != None:
obj['Policy'] = Policy
reqUrl = self.cfgUrlBase+'RedistributionPolicy'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdateRedistributionPolicy(self,
Source,
Target,
op,
path,
value,):
obj = {}
obj['Source'] = Source
obj['Target'] = Target
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'RedistributionPolicy'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getRedistributionPolicy(self,
Source,
Target):
obj = {
'Source' : Source,
'Target' : Target,
}
reqUrl = self.cfgUrlBase + 'RedistributionPolicy'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getRedistributionPolicyById(self, objectId ):
reqUrl = self.cfgUrlBase + 'RedistributionPolicy'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllRedistributionPolicys(self):
        """Fetch every RedistributionPolicy config object via the shared getObjects helper."""
        return self.getObjects('RedistributionPolicy', self.cfgUrlBase)
def getVxlanVtepInstanceState(self,
Intf,
Vni):
obj = {
'Intf' : Intf,
'Vni' : int(Vni),
}
reqUrl = self.stateUrlBase + 'VxlanVtepInstance'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getVxlanVtepInstanceStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'VxlanVtepInstance'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllVxlanVtepInstanceStates(self):
        """Fetch every VxlanVtepInstance state object via the shared getObjects helper."""
        return self.getObjects('VxlanVtepInstance', self.stateUrlBase)
def getDHCPRelayIntfServerState(self,
IntfRef,
ServerIp):
obj = {
'IntfRef' : IntfRef,
'ServerIp' : ServerIp,
}
reqUrl = self.stateUrlBase + 'DHCPRelayIntfServer'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getDHCPRelayIntfServerStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'DHCPRelayIntfServer'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllDHCPRelayIntfServerStates(self):
        """Fetch every DHCPRelayIntfServer state object via the shared getObjects helper."""
        return self.getObjects('DHCPRelayIntfServer', self.stateUrlBase)
def getDWDMModuleState(self,
ModuleId):
obj = {
'ModuleId' : int(ModuleId),
}
reqUrl = self.stateUrlBase + 'DWDMModule'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getDWDMModuleStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'DWDMModule'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllDWDMModuleStates(self):
        """Fetch every DWDMModule state object via the shared getObjects helper."""
        return self.getObjects('DWDMModule', self.stateUrlBase)
def getNDPEntryState(self,
IpAddr):
obj = {
'IpAddr' : IpAddr,
}
reqUrl = self.stateUrlBase + 'NDPEntry'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getNDPEntryStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'NDPEntry'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllNDPEntryStates(self):
        """Fetch every NDPEntry state object via the shared getObjects helper."""
        return self.getObjects('NDPEntry', self.stateUrlBase)
def getLaPortChannelIntfRefListState(self,
IntfRef):
obj = {
'IntfRef' : IntfRef,
}
reqUrl = self.stateUrlBase + 'LaPortChannelIntfRefList'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getLaPortChannelIntfRefListStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'LaPortChannelIntfRefList'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllLaPortChannelIntfRefListStates(self):
        """Fetch every LaPortChannelIntfRefList state object via the shared getObjects helper."""
        return self.getObjects('LaPortChannelIntfRefList', self.stateUrlBase)
"""
.. automethod :: createQsfp(self,
:param int32 QsfpId : Qsfp Id Qsfp Id
:param float64 HigherAlarmTemperature : Higher Alarm temperature threshold for TCA Higher Alarm temperature threshold for TCA
:param float64 HigherAlarmVoltage : Higher Alarm Voltage threshold for TCA Higher Alarm Voltage threshold for TCA
:param float64 HigherWarningTemperature : Higher Warning temperature threshold for TCA Higher Warning temperature threshold for TCA
:param float64 HigherWarningVoltage : Higher Warning Voltage threshold for TCA Higher Warning Voltage threshold for TCA
:param float64 LowerAlarmTemperature : Lower Alarm temperature threshold for TCA Lower Alarm temperature threshold for TCA
:param float64 LowerAlarmVoltage : Lower Alarm Voltage threshold for TCA Lower Alarm Voltage threshold for TCA
:param float64 LowerWarningTemperature : Lower Warning temperature threshold for TCA Lower Warning temperature threshold for TCA
:param float64 LowerWarningVoltage : Lower Warning Voltage threshold for TCA Lower Warning Voltage threshold for TCA
:param string PMClassBAdminState : PM Class-B Admin State PM Class-B Admin State
:param string PMClassCAdminState : PM Class-C Admin State PM Class-C Admin State
:param string PMClassAAdminState : PM Class-A Admin State PM Class-A Admin State
:param string AdminState : Enable/Disable Enable/Disable
"""
def createQsfp(self,
QsfpId,
HigherAlarmTemperature,
HigherAlarmVoltage,
HigherWarningTemperature,
HigherWarningVoltage,
LowerAlarmTemperature,
LowerAlarmVoltage,
LowerWarningTemperature,
LowerWarningVoltage,
PMClassBAdminState='Disable',
PMClassCAdminState='Disable',
PMClassAAdminState='Disable',
AdminState='Disable'):
obj = {
'QsfpId' : int(QsfpId),
'HigherAlarmTemperature' : HigherAlarmTemperature,
'HigherAlarmVoltage' : HigherAlarmVoltage,
'HigherWarningTemperature' : HigherWarningTemperature,
'HigherWarningVoltage' : HigherWarningVoltage,
'LowerAlarmTemperature' : LowerAlarmTemperature,
'LowerAlarmVoltage' : LowerAlarmVoltage,
'LowerWarningTemperature' : LowerWarningTemperature,
'LowerWarningVoltage' : LowerWarningVoltage,
'PMClassBAdminState' : PMClassBAdminState,
'PMClassCAdminState' : PMClassCAdminState,
'PMClassAAdminState' : PMClassAAdminState,
'AdminState' : AdminState,
}
reqUrl = self.cfgUrlBase+'Qsfp'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteQsfp(self,
QsfpId):
obj = {
'QsfpId' : QsfpId,
}
reqUrl = self.cfgUrlBase+'Qsfp'
if self.authenticate == True:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteQsfpById(self, objectId ):
reqUrl = self.cfgUrlBase+'Qsfp'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
else:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
return r
def updateQsfp(self,
QsfpId,
HigherAlarmTemperature = None,
HigherAlarmVoltage = None,
HigherWarningTemperature = None,
HigherWarningVoltage = None,
LowerAlarmTemperature = None,
LowerAlarmVoltage = None,
LowerWarningTemperature = None,
LowerWarningVoltage = None,
PMClassBAdminState = None,
PMClassCAdminState = None,
PMClassAAdminState = None,
AdminState = None):
obj = {}
if QsfpId != None :
obj['QsfpId'] = int(QsfpId)
if HigherAlarmTemperature != None :
obj['HigherAlarmTemperature'] = HigherAlarmTemperature
if HigherAlarmVoltage != None :
obj['HigherAlarmVoltage'] = HigherAlarmVoltage
if HigherWarningTemperature != None :
obj['HigherWarningTemperature'] = HigherWarningTemperature
if HigherWarningVoltage != None :
obj['HigherWarningVoltage'] = HigherWarningVoltage
if LowerAlarmTemperature != None :
obj['LowerAlarmTemperature'] = LowerAlarmTemperature
if LowerAlarmVoltage != None :
obj['LowerAlarmVoltage'] = LowerAlarmVoltage
if LowerWarningTemperature != None :
obj['LowerWarningTemperature'] = LowerWarningTemperature
if LowerWarningVoltage != None :
obj['LowerWarningVoltage'] = LowerWarningVoltage
if PMClassBAdminState != None :
obj['PMClassBAdminState'] = PMClassBAdminState
if PMClassCAdminState != None :
obj['PMClassCAdminState'] = PMClassCAdminState
if PMClassAAdminState != None :
obj['PMClassAAdminState'] = PMClassAAdminState
if AdminState != None :
obj['AdminState'] = AdminState
reqUrl = self.cfgUrlBase+'Qsfp'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateQsfpById(self,
objectId,
HigherAlarmTemperature = None,
HigherAlarmVoltage = None,
HigherWarningTemperature = None,
HigherWarningVoltage = None,
LowerAlarmTemperature = None,
LowerAlarmVoltage = None,
LowerWarningTemperature = None,
LowerWarningVoltage = None,
PMClassBAdminState = None,
PMClassCAdminState = None,
PMClassAAdminState = None,
AdminState = None):
obj = {}
if HigherAlarmTemperature != None:
obj['HigherAlarmTemperature'] = HigherAlarmTemperature
if HigherAlarmVoltage != None:
obj['HigherAlarmVoltage'] = HigherAlarmVoltage
if HigherWarningTemperature != None:
obj['HigherWarningTemperature'] = HigherWarningTemperature
if HigherWarningVoltage != None:
obj['HigherWarningVoltage'] = HigherWarningVoltage
if LowerAlarmTemperature != None:
obj['LowerAlarmTemperature'] = LowerAlarmTemperature
if LowerAlarmVoltage != None:
obj['LowerAlarmVoltage'] = LowerAlarmVoltage
if LowerWarningTemperature != None:
obj['LowerWarningTemperature'] = LowerWarningTemperature
if LowerWarningVoltage != None:
obj['LowerWarningVoltage'] = LowerWarningVoltage
if PMClassBAdminState != None:
obj['PMClassBAdminState'] = PMClassBAdminState
if PMClassCAdminState != None:
obj['PMClassCAdminState'] = PMClassCAdminState
if PMClassAAdminState != None:
obj['PMClassAAdminState'] = PMClassAAdminState
if AdminState != None:
obj['AdminState'] = AdminState
reqUrl = self.cfgUrlBase+'Qsfp'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdateQsfp(self,
QsfpId,
op,
path,
value,):
obj = {}
obj['QsfpId'] = QsfpId
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'Qsfp'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getQsfp(self,
QsfpId):
obj = {
'QsfpId' : int(QsfpId),
}
reqUrl = self.cfgUrlBase + 'Qsfp'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getQsfpById(self, objectId ):
reqUrl = self.cfgUrlBase + 'Qsfp'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllQsfps(self):
        """Fetch every Qsfp config object via the shared getObjects helper."""
        return self.getObjects('Qsfp', self.cfgUrlBase)
def getOspfv2AreaState(self,
AreaId):
obj = {
'AreaId' : AreaId,
}
reqUrl = self.stateUrlBase + 'Ospfv2Area'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getOspfv2AreaStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'Ospfv2Area'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllOspfv2AreaStates(self):
        """Fetch every Ospfv2Area state object via the shared getObjects helper."""
        return self.getObjects('Ospfv2Area', self.stateUrlBase)
def getBfdSessionParamState(self,
Name):
obj = {
'Name' : Name,
}
reqUrl = self.stateUrlBase + 'BfdSessionParam'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getBfdSessionParamStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'BfdSessionParam'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllBfdSessionParamStates(self):
        """Fetch every BfdSessionParam state object via the shared getObjects helper."""
        return self.getObjects('BfdSessionParam', self.stateUrlBase)
"""
.. automethod :: createAclMacFilter(self,
:param string FilterName : MAC filter name . MAC filter name .
:param string DestMac : Destination MAC address Destination MAC address
:param string SourceMask : Destination MAC address Destination MAC address
:param string DestMask : Source MAC address Source MAC address
:param string SourceMac : Source MAC address. Source MAC address.
"""
def createAclMacFilter(self,
FilterName,
DestMac='',
SourceMask='FF',
DestMask='FF',
SourceMac=''):
obj = {
'FilterName' : FilterName,
'DestMac' : DestMac,
'SourceMask' : SourceMask,
'DestMask' : DestMask,
'SourceMac' : SourceMac,
}
reqUrl = self.cfgUrlBase+'AclMacFilter'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteAclMacFilter(self,
FilterName):
obj = {
'FilterName' : FilterName,
}
reqUrl = self.cfgUrlBase+'AclMacFilter'
if self.authenticate == True:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteAclMacFilterById(self, objectId ):
reqUrl = self.cfgUrlBase+'AclMacFilter'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
else:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
return r
def updateAclMacFilter(self,
FilterName,
DestMac = None,
SourceMask = None,
DestMask = None,
SourceMac = None):
obj = {}
if FilterName != None :
obj['FilterName'] = FilterName
if DestMac != None :
obj['DestMac'] = DestMac
if SourceMask != None :
obj['SourceMask'] = SourceMask
if DestMask != None :
obj['DestMask'] = DestMask
if SourceMac != None :
obj['SourceMac'] = SourceMac
reqUrl = self.cfgUrlBase+'AclMacFilter'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateAclMacFilterById(self,
objectId,
DestMac = None,
SourceMask = None,
DestMask = None,
SourceMac = None):
obj = {}
if DestMac != None:
obj['DestMac'] = DestMac
if SourceMask != None:
obj['SourceMask'] = SourceMask
if DestMask != None:
obj['DestMask'] = DestMask
if SourceMac != None:
obj['SourceMac'] = SourceMac
reqUrl = self.cfgUrlBase+'AclMacFilter'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdateAclMacFilter(self,
FilterName,
op,
path,
value,):
obj = {}
obj['FilterName'] = FilterName
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'AclMacFilter'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getAclMacFilter(self,
FilterName):
obj = {
'FilterName' : FilterName,
}
reqUrl = self.cfgUrlBase + 'AclMacFilter'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getAclMacFilterById(self, objectId ):
reqUrl = self.cfgUrlBase + 'AclMacFilter'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllAclMacFilters(self):
        """Fetch every AclMacFilter config object via the shared getObjects helper."""
        return self.getObjects('AclMacFilter', self.cfgUrlBase)
"""
.. automethod :: executeDaemon(self,
:param string Name : FlexSwitch daemon name FlexSwitch daemon name
:param string Op : Start Start
:param bool WatchDog : Enable watchdog for daemon Enable watchdog for daemon
"""
def executeDaemon(self,
Name,
Op,
WatchDog):
obj = {
'Name' : Name,
'Op' : Op,
'WatchDog' : True if WatchDog else False,
}
reqUrl = self.actionUrlBase+'Daemon'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers)
return r
def getAsicGlobalState(self,
ModuleId):
obj = {
'ModuleId' : int(ModuleId),
}
reqUrl = self.stateUrlBase + 'AsicGlobal'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getAsicGlobalStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'AsicGlobal'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllAsicGlobalStates(self):
        """Fetch every AsicGlobal state object via the shared getObjects helper."""
        return self.getObjects('AsicGlobal', self.stateUrlBase)
def getOspfLsdbEntryState(self,
LsdbType,
LsdbAreaId,
LsdbLsid,
LsdbRouterId):
obj = {
'LsdbType' : int(LsdbType),
'LsdbAreaId' : LsdbAreaId,
'LsdbLsid' : LsdbLsid,
'LsdbRouterId' : LsdbRouterId,
}
reqUrl = self.stateUrlBase + 'OspfLsdbEntry'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getOspfLsdbEntryStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'OspfLsdbEntry'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllOspfLsdbEntryStates(self):
        """Fetch every OspfLsdbEntry state object via the shared getObjects helper."""
        return self.getObjects('OspfLsdbEntry', self.stateUrlBase)
def getArpLinuxEntryState(self,
IpAddr):
obj = {
'IpAddr' : IpAddr,
}
reqUrl = self.stateUrlBase + 'ArpLinuxEntry'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getArpLinuxEntryStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'ArpLinuxEntry'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllArpLinuxEntryStates(self):
        """Fetch every ArpLinuxEntry state object via the shared getObjects helper."""
        return self.getObjects('ArpLinuxEntry', self.stateUrlBase)
def updateStpGlobal(self,
Vrf,
AdminState = None):
obj = {}
if Vrf != None :
obj['Vrf'] = Vrf
if AdminState != None :
obj['AdminState'] = AdminState
reqUrl = self.cfgUrlBase+'StpGlobal'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateStpGlobalById(self,
objectId,
AdminState = None):
obj = {}
if AdminState != None:
obj['AdminState'] = AdminState
reqUrl = self.cfgUrlBase+'StpGlobal'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdateStpGlobal(self,
Vrf,
op,
path,
value,):
obj = {}
obj['Vrf'] = Vrf
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'StpGlobal'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getStpGlobal(self,
Vrf):
obj = {
'Vrf' : Vrf,
}
reqUrl = self.cfgUrlBase + 'StpGlobal'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getStpGlobalById(self, objectId ):
reqUrl = self.cfgUrlBase + 'StpGlobal'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllStpGlobals(self):
    """Return every StpGlobal config object via the generic getter."""
    return self.getObjects('StpGlobal', self.cfgUrlBase)
"""
.. automethod :: createDistributedRelay(self,
:param string DrniName : The unique identifier allocated to this Distributed Relay by the local System. This attribute identifies a Distributed Relay instance among the subordinate managed objects of the containing object.
:param string PortalAddress : A read-write identifier of a particular Portal. Portal-Addr has to be unique among at least all of the potential Portal Systems to which a given Portal System might be attached via an IPL Intra-Portal Link. Also used as the Actors System ID (6.3.2) for the emulated system
:param uint8 PortalSystemNumber : A read-write identifier of this particular Portal System within a Portal. It is the responsibility of the network administrator to ensure that these numbers are unique among the Portal Systems with the same aDrniPortalAddr (7.4.1.1.4)
:param string IntfReflist : Read-write list of the Interface Identifiers of the Ports to the Intra-Portal Links assigned to this Distributed Relay. Each Interface Identifier
:param string IntfRef : Read-write Interface Identifier of the Aggregator Port assigned to this Distributed Relay
:param uint16 PortalPriority : A 2octet read-write value indicating the priority value associated with the Portals System ID. Also used as the Actors System Priority (6.3.2) for the emulated system.
:param string GatewayAlgorithm : This object identifies the algorithm used by the DR Function to assign frames to a Gateway Conversation ID. Table 9-7 provides the IEEE 802.1 OUI (00
:param string NeighborAdminDRCPState : A string of 8 bits
:param string NeighborGatewayAlgorithm : This object identifies the value for the Gateway algorithm of the Neighbor Portal System
:param bool ThreePortalSystem : A read-write Boolean value indicating whether this Portal System is part of a Portal consisting of three Portal Systems or not. Value 1 stands for a Portal of three Portal Systems
:param string IntraPortalPortProtocolDA : A 6-octet read-write MAC Address value specifying the DA to be used when sending DRCPDUs
:param string NeighborPortAlgorithm : This object identifies the value for the Port Algorithm of the Neighbor Portal System
:param string EncapMethod : This managed object is applicable only when Network / IPL sharing by time (9.3.2.1) or Network / IPL sharing by tag (9.3.2.2) or Network / IPL sharing by encapsulation (9.3.2.3) is supported. The object identifies the value representing the encapsulation method that is used to transport IPL frames to the Neighbor Portal System when the IPL and network link are sharing the same physical link. It consists of the 3-octet OUI or CID identifying the organization that is responsible for this encapsulation and one following octet used to identify the encapsulation method defined by that organization. Table 9-11 provides the IEEE 802.1 OUI (00-80-C2) encapsulation method encodings. A Default value of 0x00-80-C2-00 indicates that the IPL is using a separate physical or Aggregation link. A value of 1 indicates that Network / IPL sharing by time (9.3.2.1) is used. A value of 2 indicates that the encapsulation method used is the same as the one used by network frames and that Network / IPL sharing by tag (9.3.2.2) is used
"""
def createDistributedRelay(self,
                           DrniName,
                           PortalAddress,
                           PortalSystemNumber,
                           IntfReflist,
                           IntfRef,
                           PortalPriority=32768,
                           GatewayAlgorithm='00-80-C2-01',
                           NeighborAdminDRCPState='00000000',
                           NeighborGatewayAlgorithm='00-80-C2-01',
                           ThreePortalSystem=False,
                           IntraPortalPortProtocolDA='01-80-C2-00-00-03',
                           NeighborPortAlgorithm='00-80-C2-01',
                           EncapMethod='00-80-C2-01'):
    """POST a new DistributedRelay config object.

    Numeric fields are coerced to int and ThreePortalSystem is normalized
    to a plain bool before JSON encoding. Returns the requests.Response.
    """
    body = {
        'DrniName': DrniName,
        'PortalAddress': PortalAddress,
        'PortalSystemNumber': int(PortalSystemNumber),
        'IntfReflist': IntfReflist,
        'IntfRef': IntfRef,
        'PortalPriority': int(PortalPriority),
        'GatewayAlgorithm': GatewayAlgorithm,
        'NeighborAdminDRCPState': NeighborAdminDRCPState,
        'NeighborGatewayAlgorithm': NeighborGatewayAlgorithm,
        'ThreePortalSystem': bool(ThreePortalSystem),
        'IntraPortalPortProtocolDA': IntraPortalPortProtocolDA,
        'NeighborPortAlgorithm': NeighborPortAlgorithm,
        'EncapMethod': EncapMethod,
    }
    url = self.cfgUrlBase + 'DistributedRelay'
    kwargs = dict(data=json.dumps(body), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.post(url, **kwargs)
def deleteDistributedRelay(self, DrniName):
    """DELETE the DistributedRelay config object keyed by DrniName."""
    body = {'DrniName': DrniName}
    url = self.cfgUrlBase + 'DistributedRelay'
    kwargs = dict(data=json.dumps(body), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.delete(url, **kwargs)
def deleteDistributedRelayById(self, objectId ):
    """DELETE the DistributedRelay object addressed by its object id.

    Bug fix: both branches previously issued the identical unauthenticated
    request, so credentials were never sent when self.authenticate was set.
    The authenticated branch now passes basic auth and verify=False, matching
    every other *ById method in this client.
    """
    reqUrl = self.cfgUrlBase+'DistributedRelay'+"/%s"%(objectId)
    if self.authenticate == True:
        r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
    return r
def updateDistributedRelay(self,
                           DrniName,
                           PortalAddress = None,
                           PortalSystemNumber = None,
                           IntfReflist = None,
                           IntfRef = None,
                           PortalPriority = None,
                           GatewayAlgorithm = None,
                           NeighborAdminDRCPState = None,
                           NeighborGatewayAlgorithm = None,
                           ThreePortalSystem = None,
                           IntraPortalPortProtocolDA = None,
                           NeighborPortAlgorithm = None,
                           EncapMethod = None):
    """PATCH the DistributedRelay keyed by DrniName.

    Only attributes that are not None are sent; numeric fields are coerced
    to int and ThreePortalSystem to a plain bool. Returns requests.Response.
    """
    body = {}
    if DrniName is not None:
        body['DrniName'] = DrniName
    if PortalAddress is not None:
        body['PortalAddress'] = PortalAddress
    if PortalSystemNumber is not None:
        body['PortalSystemNumber'] = int(PortalSystemNumber)
    if IntfReflist is not None:
        body['IntfReflist'] = IntfReflist
    if IntfRef is not None:
        body['IntfRef'] = IntfRef
    if PortalPriority is not None:
        body['PortalPriority'] = int(PortalPriority)
    if GatewayAlgorithm is not None:
        body['GatewayAlgorithm'] = GatewayAlgorithm
    if NeighborAdminDRCPState is not None:
        body['NeighborAdminDRCPState'] = NeighborAdminDRCPState
    if NeighborGatewayAlgorithm is not None:
        body['NeighborGatewayAlgorithm'] = NeighborGatewayAlgorithm
    if ThreePortalSystem is not None:
        body['ThreePortalSystem'] = bool(ThreePortalSystem)
    if IntraPortalPortProtocolDA is not None:
        body['IntraPortalPortProtocolDA'] = IntraPortalPortProtocolDA
    if NeighborPortAlgorithm is not None:
        body['NeighborPortAlgorithm'] = NeighborPortAlgorithm
    if EncapMethod is not None:
        body['EncapMethod'] = EncapMethod
    url = self.cfgUrlBase + 'DistributedRelay'
    kwargs = dict(data=json.dumps(body), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, **kwargs)
def updateDistributedRelayById(self,
                               objectId,
                               PortalAddress = None,
                               PortalSystemNumber = None,
                               IntfReflist = None,
                               IntfRef = None,
                               PortalPriority = None,
                               GatewayAlgorithm = None,
                               NeighborAdminDRCPState = None,
                               NeighborGatewayAlgorithm = None,
                               ThreePortalSystem = None,
                               IntraPortalPortProtocolDA = None,
                               NeighborPortAlgorithm = None,
                               EncapMethod = None):
    """PATCH the DistributedRelay addressed by its opaque object id.

    Only non-None attributes are sent. Consistency fix: PortalSystemNumber
    and PortalPriority are now coerced with int() and ThreePortalSystem is
    normalized to a plain bool, exactly as updateDistributedRelay and
    createDistributedRelay already do; the key-based and id-based update
    paths previously serialized the same fields differently.
    """
    obj = {}
    if PortalAddress != None:
        obj['PortalAddress'] = PortalAddress
    if PortalSystemNumber != None:
        obj['PortalSystemNumber'] = int(PortalSystemNumber)
    if IntfReflist != None:
        obj['IntfReflist'] = IntfReflist
    if IntfRef != None:
        obj['IntfRef'] = IntfRef
    if PortalPriority != None:
        obj['PortalPriority'] = int(PortalPriority)
    if GatewayAlgorithm != None:
        obj['GatewayAlgorithm'] = GatewayAlgorithm
    if NeighborAdminDRCPState != None:
        obj['NeighborAdminDRCPState'] = NeighborAdminDRCPState
    if NeighborGatewayAlgorithm != None:
        obj['NeighborGatewayAlgorithm'] = NeighborGatewayAlgorithm
    if ThreePortalSystem != None:
        obj['ThreePortalSystem'] = True if ThreePortalSystem else False
    if IntraPortalPortProtocolDA != None:
        obj['IntraPortalPortProtocolDA'] = IntraPortalPortProtocolDA
    if NeighborPortAlgorithm != None:
        obj['NeighborPortAlgorithm'] = NeighborPortAlgorithm
    if EncapMethod != None:
        obj['EncapMethod'] = EncapMethod
    reqUrl = self.cfgUrlBase+'DistributedRelay'+"/%s"%(objectId)
    if self.authenticate == True:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
    return r
def patchUpdateDistributedRelay(self, DrniName, op, path, value):
    """Send a JSON-patch style update for the DistributedRelay keyed by DrniName."""
    body = {
        'DrniName': DrniName,
        'patch': [{'op': op, 'path': path, 'value': value}],
    }
    url = self.cfgUrlBase + 'DistributedRelay'
    kwargs = dict(data=json.dumps(body), headers=patchheaders, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, **kwargs)
def getDistributedRelay(self, DrniName):
    """GET the DistributedRelay config object keyed by DrniName."""
    body = {'DrniName': DrniName}
    url = self.cfgUrlBase + 'DistributedRelay'
    kwargs = dict(data=json.dumps(body), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getDistributedRelayById(self, objectId):
    """GET a single DistributedRelay object by its opaque object id."""
    url = self.cfgUrlBase + 'DistributedRelay' + "/%s" % (objectId)
    kwargs = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getAllDistributedRelays(self):
    """Return every DistributedRelay config object via the generic getter."""
    return self.getObjects('DistributedRelay', self.cfgUrlBase)
def updateOspfGlobal(self,
                     Vrf,
                     AdminStat = None,
                     ASBdrRtrStatus = None,
                     RestartSupport = None,
                     RestartInterval = None,
                     RouterId = None,
                     TOSSupport = None,
                     ReferenceBandwidth = None):
    """PATCH the OspfGlobal keyed by Vrf.

    Only non-None attributes are sent; numeric fields are coerced to int
    and flag fields to plain bools. Returns the requests.Response.
    """
    body = {}
    if Vrf is not None:
        body['Vrf'] = Vrf
    if AdminStat is not None:
        body['AdminStat'] = int(AdminStat)
    if ASBdrRtrStatus is not None:
        body['ASBdrRtrStatus'] = bool(ASBdrRtrStatus)
    if RestartSupport is not None:
        body['RestartSupport'] = int(RestartSupport)
    if RestartInterval is not None:
        body['RestartInterval'] = int(RestartInterval)
    if RouterId is not None:
        body['RouterId'] = RouterId
    if TOSSupport is not None:
        body['TOSSupport'] = bool(TOSSupport)
    if ReferenceBandwidth is not None:
        body['ReferenceBandwidth'] = int(ReferenceBandwidth)
    url = self.cfgUrlBase + 'OspfGlobal'
    kwargs = dict(data=json.dumps(body), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, **kwargs)
def updateOspfGlobalById(self,
                         objectId,
                         AdminStat = None,
                         ASBdrRtrStatus = None,
                         RestartSupport = None,
                         RestartInterval = None,
                         RouterId = None,
                         TOSSupport = None,
                         ReferenceBandwidth = None):
    """PATCH the OspfGlobal addressed by its opaque object id.

    Only non-None attributes are sent. Consistency fix: numeric fields are
    now coerced with int() and the boolean flags normalized with the same
    `True if x else False` idiom used by updateOspfGlobal, so the id-based
    path serializes identically to the key-based one.
    """
    obj = {}
    if AdminStat != None:
        obj['AdminStat'] = int(AdminStat)
    if ASBdrRtrStatus != None:
        obj['ASBdrRtrStatus'] = True if ASBdrRtrStatus else False
    if RestartSupport != None:
        obj['RestartSupport'] = int(RestartSupport)
    if RestartInterval != None:
        obj['RestartInterval'] = int(RestartInterval)
    if RouterId != None:
        obj['RouterId'] = RouterId
    if TOSSupport != None:
        obj['TOSSupport'] = True if TOSSupport else False
    if ReferenceBandwidth != None:
        obj['ReferenceBandwidth'] = int(ReferenceBandwidth)
    reqUrl = self.cfgUrlBase+'OspfGlobal'+"/%s"%(objectId)
    if self.authenticate == True:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
    return r
def patchUpdateOspfGlobal(self, Vrf, op, path, value):
    """Send a JSON-patch style update for the OspfGlobal keyed by Vrf."""
    body = {
        'Vrf': Vrf,
        'patch': [{'op': op, 'path': path, 'value': value}],
    }
    url = self.cfgUrlBase + 'OspfGlobal'
    kwargs = dict(data=json.dumps(body), headers=patchheaders, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, **kwargs)
def getOspfGlobal(self, Vrf):
    """GET the OspfGlobal config object keyed by Vrf."""
    body = {'Vrf': Vrf}
    url = self.cfgUrlBase + 'OspfGlobal'
    kwargs = dict(data=json.dumps(body), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getOspfGlobalById(self, objectId):
    """GET a single OspfGlobal object by its opaque object id."""
    url = self.cfgUrlBase + 'OspfGlobal' + "/%s" % (objectId)
    kwargs = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getAllOspfGlobals(self):
    """Return every OspfGlobal config object via the generic getter."""
    return self.getObjects('OspfGlobal', self.cfgUrlBase)
"""
.. automethod :: executeResetBGPv4NeighborByInterface(self,
:param string IntfRef : Interface of the BGP IPv4 neighbor to restart
"""
def executeResetBGPv4NeighborByInterface(self, IntfRef):
    """POST the ResetBGPv4NeighborByInterface action for the given interface.

    NOTE(review): unlike the CRUD helpers this action request carries no
    timeout= argument — presumably deliberate so a long-running neighbor
    reset is not cut short; confirm before adding one.
    """
    body = {'IntfRef': IntfRef}
    url = self.actionUrlBase + 'ResetBGPv4NeighborByInterface'
    kwargs = dict(data=json.dumps(body), headers=headers)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.post(url, **kwargs)
def getLLDPIntfState(self, IntfRef):
    """GET the LLDPIntf state object keyed by IntfRef."""
    body = {'IntfRef': IntfRef}
    url = self.stateUrlBase + 'LLDPIntf'
    kwargs = dict(data=json.dumps(body), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getLLDPIntfStateById(self, objectId):
    """GET a single LLDPIntf state object by its opaque object id."""
    url = self.stateUrlBase + 'LLDPIntf' + "/%s" % (objectId)
    kwargs = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getAllLLDPIntfStates(self):
    """Return every LLDPIntf state object via the generic getter."""
    return self.getObjects('LLDPIntf', self.stateUrlBase)
"""
.. automethod :: createLed(self,
:param int32 LedId : LED id
:param string LedAdmin : LED ON/OFF
:param string LedSetColor : LED set color
"""
def createLed(self,
              LedAdmin,
              LedSetColor,
              LedId=0):
    """POST a new Led config object.

    :param LedAdmin: LED ON/OFF
    :param LedSetColor: LED set color
    :param LedId: LED id. Generalized from the previous hard-coded 0
        (the docstring above already advertised a LedId parameter); the
        default of 0 preserves the old behavior for existing callers.
    Returns the requests.Response object.
    """
    obj = {
        'LedId' : int(LedId),
        'LedAdmin' : LedAdmin,
        'LedSetColor' : LedSetColor,
    }
    reqUrl = self.cfgUrlBase+'Led'
    if self.authenticate == True:
        r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
    return r
def deleteLed(self, LedId):
    """DELETE the Led config object keyed by LedId."""
    body = {'LedId': LedId}
    url = self.cfgUrlBase + 'Led'
    kwargs = dict(data=json.dumps(body), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.delete(url, **kwargs)
def deleteLedById(self, objectId ):
    """DELETE the Led object addressed by its opaque object id.

    Bug fix: both branches previously issued the identical unauthenticated
    request, so credentials were never sent when self.authenticate was set.
    The authenticated branch now passes basic auth and verify=False, matching
    every other *ById method in this client.
    """
    reqUrl = self.cfgUrlBase+'Led'+"/%s"%(objectId)
    if self.authenticate == True:
        r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
    return r
def updateLed(self, LedId, LedAdmin=None, LedSetColor=None):
    """PATCH the Led keyed by LedId; only non-None attributes are sent."""
    body = {}
    if LedId is not None:
        body['LedId'] = int(LedId)
    if LedAdmin is not None:
        body['LedAdmin'] = LedAdmin
    if LedSetColor is not None:
        body['LedSetColor'] = LedSetColor
    url = self.cfgUrlBase + 'Led'
    kwargs = dict(data=json.dumps(body), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, **kwargs)
def updateLedById(self, objectId, LedAdmin=None, LedSetColor=None):
    """PATCH the Led addressed by objectId; only non-None attributes are sent."""
    body = {}
    if LedAdmin is not None:
        body['LedAdmin'] = LedAdmin
    if LedSetColor is not None:
        body['LedSetColor'] = LedSetColor
    url = self.cfgUrlBase + 'Led' + "/%s" % (objectId)
    kwargs = dict(data=json.dumps(body), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, **kwargs)
def patchUpdateLed(self, LedId, op, path, value):
    """Send a JSON-patch style update for the Led keyed by LedId."""
    body = {
        'LedId': LedId,
        'patch': [{'op': op, 'path': path, 'value': value}],
    }
    url = self.cfgUrlBase + 'Led'
    kwargs = dict(data=json.dumps(body), headers=patchheaders, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, **kwargs)
def getLed(self, LedId):
    """GET the Led config object keyed by LedId (coerced to int)."""
    body = {'LedId': int(LedId)}
    url = self.cfgUrlBase + 'Led'
    kwargs = dict(data=json.dumps(body), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getLedById(self, objectId):
    """GET a single Led object by its opaque object id."""
    url = self.cfgUrlBase + 'Led' + "/%s" % (objectId)
    kwargs = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getAllLeds(self):
    """Return every Led config object via the generic getter."""
    return self.getObjects('Led', self.cfgUrlBase)
def getPolicyCommunitySetState(self, Name):
    """GET the PolicyCommunitySet state object keyed by Name."""
    body = {'Name': Name}
    url = self.stateUrlBase + 'PolicyCommunitySet'
    kwargs = dict(data=json.dumps(body), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getPolicyCommunitySetStateById(self, objectId):
    """GET a single PolicyCommunitySet state object by its object id."""
    url = self.stateUrlBase + 'PolicyCommunitySet' + "/%s" % (objectId)
    kwargs = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getAllPolicyCommunitySetStates(self):
    """Return every PolicyCommunitySet state object via the generic getter."""
    return self.getObjects('PolicyCommunitySet', self.stateUrlBase)
def getFaultState(self, EventId, EventName, SrcObjName, OwnerName, OwnerId):
    """GET the Fault state object for the given compound key.

    EventId and OwnerId are coerced to int before JSON encoding.
    """
    body = {
        'EventId': int(EventId),
        'EventName': EventName,
        'SrcObjName': SrcObjName,
        'OwnerName': OwnerName,
        'OwnerId': int(OwnerId),
    }
    url = self.stateUrlBase + 'Fault'
    kwargs = dict(data=json.dumps(body), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getFaultStateById(self, objectId):
    """GET a single Fault state object by its opaque object id."""
    url = self.stateUrlBase + 'Fault' + "/%s" % (objectId)
    kwargs = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getAllFaultStates(self):
    """Return every Fault state object via the generic getter."""
    return self.getObjects('Fault', self.stateUrlBase)
def getAclState(self, AclName):
    """GET the Acl state object keyed by AclName."""
    body = {'AclName': AclName}
    url = self.stateUrlBase + 'Acl'
    kwargs = dict(data=json.dumps(body), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getAclStateById(self, objectId):
    """GET a single Acl state object by its opaque object id."""
    url = self.stateUrlBase + 'Acl' + "/%s" % (objectId)
    kwargs = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getAllAclStates(self):
    """Return every Acl state object via the generic getter."""
    return self.getObjects('Acl', self.stateUrlBase)
"""
.. automethod :: createEthernetPM(self,
:param string Resource : Resource identifier
:param string IntfRef : Interface name of port
:param bool PMClassBEnable : Enable/Disable control for CLASS-B PM
:param bool PMClassCEnable : Enable/Disable control for CLASS-C PM
:param float64 HighWarnThreshold : High warning threshold value for this PM
:param float64 LowAlarmThreshold : Low alarm threshold value for this PM
:param bool PMClassAEnable : Enable/Disable control for CLASS-A PM
:param float64 HighAlarmThreshold : High alarm threshold value for this PM
:param float64 LowWarnThreshold : Low warning threshold value for this PM
"""
def createEthernetPM(self,
                     Resource,
                     IntfRef,
                     PMClassBEnable=True,
                     PMClassCEnable=True,
                     HighWarnThreshold='100000',
                     LowAlarmThreshold='-100000',
                     PMClassAEnable=True,
                     HighAlarmThreshold='100000',
                     LowWarnThreshold='-100000'):
    """POST a new EthernetPM config object.

    Class enable flags are normalized to plain bools; threshold values are
    passed through unchanged (string defaults per the generated API spec).
    """
    body = {
        'Resource': Resource,
        'IntfRef': IntfRef,
        'PMClassBEnable': bool(PMClassBEnable),
        'PMClassCEnable': bool(PMClassCEnable),
        'HighWarnThreshold': HighWarnThreshold,
        'LowAlarmThreshold': LowAlarmThreshold,
        'PMClassAEnable': bool(PMClassAEnable),
        'HighAlarmThreshold': HighAlarmThreshold,
        'LowWarnThreshold': LowWarnThreshold,
    }
    url = self.cfgUrlBase + 'EthernetPM'
    kwargs = dict(data=json.dumps(body), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.post(url, **kwargs)
def deleteEthernetPM(self, Resource, IntfRef):
    """DELETE the EthernetPM config object keyed by (Resource, IntfRef)."""
    body = {'Resource': Resource, 'IntfRef': IntfRef}
    url = self.cfgUrlBase + 'EthernetPM'
    kwargs = dict(data=json.dumps(body), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.delete(url, **kwargs)
def deleteEthernetPMById(self, objectId ):
    """DELETE the EthernetPM object addressed by its opaque object id.

    Bug fix: both branches previously issued the identical unauthenticated
    request, so credentials were never sent when self.authenticate was set.
    The authenticated branch now passes basic auth and verify=False, matching
    every other *ById method in this client.
    """
    reqUrl = self.cfgUrlBase+'EthernetPM'+"/%s"%(objectId)
    if self.authenticate == True:
        r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
    return r
def updateEthernetPM(self,
                     Resource,
                     IntfRef,
                     PMClassBEnable = None,
                     PMClassCEnable = None,
                     HighWarnThreshold = None,
                     LowAlarmThreshold = None,
                     PMClassAEnable = None,
                     HighAlarmThreshold = None,
                     LowWarnThreshold = None):
    """PATCH the EthernetPM keyed by (Resource, IntfRef).

    Only non-None attributes are sent; class enable flags are normalized
    to plain bools. Returns the requests.Response.
    """
    body = {}
    if Resource is not None:
        body['Resource'] = Resource
    if IntfRef is not None:
        body['IntfRef'] = IntfRef
    if PMClassBEnable is not None:
        body['PMClassBEnable'] = bool(PMClassBEnable)
    if PMClassCEnable is not None:
        body['PMClassCEnable'] = bool(PMClassCEnable)
    if HighWarnThreshold is not None:
        body['HighWarnThreshold'] = HighWarnThreshold
    if LowAlarmThreshold is not None:
        body['LowAlarmThreshold'] = LowAlarmThreshold
    if PMClassAEnable is not None:
        body['PMClassAEnable'] = bool(PMClassAEnable)
    if HighAlarmThreshold is not None:
        body['HighAlarmThreshold'] = HighAlarmThreshold
    if LowWarnThreshold is not None:
        body['LowWarnThreshold'] = LowWarnThreshold
    url = self.cfgUrlBase + 'EthernetPM'
    kwargs = dict(data=json.dumps(body), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, **kwargs)
def updateEthernetPMById(self,
                         objectId,
                         PMClassBEnable = None,
                         PMClassCEnable = None,
                         HighWarnThreshold = None,
                         LowAlarmThreshold = None,
                         PMClassAEnable = None,
                         HighAlarmThreshold = None,
                         LowWarnThreshold = None):
    """PATCH the EthernetPM addressed by its opaque object id.

    Only non-None attributes are sent. Consistency fix: the class enable
    flags are now normalized with the same `True if x else False` idiom
    used by updateEthernetPM/createEthernetPM, so the id-based path
    serializes identically to the key-based one.
    """
    obj = {}
    if PMClassBEnable != None:
        obj['PMClassBEnable'] = True if PMClassBEnable else False
    if PMClassCEnable != None:
        obj['PMClassCEnable'] = True if PMClassCEnable else False
    if HighWarnThreshold != None:
        obj['HighWarnThreshold'] = HighWarnThreshold
    if LowAlarmThreshold != None:
        obj['LowAlarmThreshold'] = LowAlarmThreshold
    if PMClassAEnable != None:
        obj['PMClassAEnable'] = True if PMClassAEnable else False
    if HighAlarmThreshold != None:
        obj['HighAlarmThreshold'] = HighAlarmThreshold
    if LowWarnThreshold != None:
        obj['LowWarnThreshold'] = LowWarnThreshold
    reqUrl = self.cfgUrlBase+'EthernetPM'+"/%s"%(objectId)
    if self.authenticate == True:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
    return r
def patchUpdateEthernetPM(self, Resource, IntfRef, op, path, value):
    """Send a JSON-patch style update for the EthernetPM keyed by (Resource, IntfRef)."""
    body = {
        'Resource': Resource,
        'IntfRef': IntfRef,
        'patch': [{'op': op, 'path': path, 'value': value}],
    }
    url = self.cfgUrlBase + 'EthernetPM'
    kwargs = dict(data=json.dumps(body), headers=patchheaders, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, **kwargs)
def getEthernetPM(self, Resource, IntfRef):
    """GET the EthernetPM config object keyed by (Resource, IntfRef)."""
    body = {'Resource': Resource, 'IntfRef': IntfRef}
    url = self.cfgUrlBase + 'EthernetPM'
    kwargs = dict(data=json.dumps(body), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getEthernetPMById(self, objectId):
    """GET a single EthernetPM object by its opaque object id."""
    url = self.cfgUrlBase + 'EthernetPM' + "/%s" % (objectId)
    kwargs = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getAllEthernetPMs(self):
    """Return every EthernetPM config object via the generic getter."""
    return self.getObjects('EthernetPM', self.cfgUrlBase)
def getPolicyASPathSetState(self, Name):
    """GET the PolicyASPathSet state object keyed by Name."""
    body = {'Name': Name}
    url = self.stateUrlBase + 'PolicyASPathSet'
    kwargs = dict(data=json.dumps(body), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getPolicyASPathSetStateById(self, objectId):
    """GET a single PolicyASPathSet state object by its object id."""
    url = self.stateUrlBase + 'PolicyASPathSet' + "/%s" % (objectId)
    kwargs = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getAllPolicyASPathSetStates(self):
    """Return every PolicyASPathSet state object via the generic getter."""
    return self.getObjects('PolicyASPathSet', self.stateUrlBase)
def updateBfdGlobal(self, Vrf, Enable=None):
    """PATCH the BfdGlobal keyed by Vrf.

    Only non-None attributes are sent; Enable is normalized to a plain bool.
    """
    body = {}
    if Vrf is not None:
        body['Vrf'] = Vrf
    if Enable is not None:
        body['Enable'] = bool(Enable)
    url = self.cfgUrlBase + 'BfdGlobal'
    kwargs = dict(data=json.dumps(body), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, **kwargs)
def updateBfdGlobalById(self,
                        objectId,
                        Enable = None):
    """PATCH the BfdGlobal addressed by its opaque object id.

    Consistency fix: Enable is now normalized with the same
    `True if x else False` idiom used by updateBfdGlobal, so the id-based
    path serializes the flag identically to the key-based one.
    """
    obj = {}
    if Enable != None:
        obj['Enable'] = True if Enable else False
    reqUrl = self.cfgUrlBase+'BfdGlobal'+"/%s"%(objectId)
    if self.authenticate == True:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
    return r
def patchUpdateBfdGlobal(self, Vrf, op, path, value):
    """Send a JSON-patch style update for the BfdGlobal keyed by Vrf."""
    body = {
        'Vrf': Vrf,
        'patch': [{'op': op, 'path': path, 'value': value}],
    }
    url = self.cfgUrlBase + 'BfdGlobal'
    kwargs = dict(data=json.dumps(body), headers=patchheaders, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, **kwargs)
def getBfdGlobal(self, Vrf):
    """GET the BfdGlobal config object keyed by Vrf."""
    body = {'Vrf': Vrf}
    url = self.cfgUrlBase + 'BfdGlobal'
    kwargs = dict(data=json.dumps(body), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getBfdGlobalById(self, objectId):
    """GET a single BfdGlobal object by its opaque object id."""
    url = self.cfgUrlBase + 'BfdGlobal' + "/%s" % (objectId)
    kwargs = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
    def getAllBfdGlobals(self):
        """Fetch every BfdGlobal config object via the shared getObjects helper."""
        return self.getObjects('BfdGlobal', self.cfgUrlBase)
def getDWDMModuleClntIntfState(self,
ClntIntfId,
ModuleId):
obj = {
'ClntIntfId' : int(ClntIntfId),
'ModuleId' : int(ModuleId),
}
reqUrl = self.stateUrlBase + 'DWDMModuleClntIntf'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getDWDMModuleClntIntfStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'DWDMModuleClntIntf'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllDWDMModuleClntIntfStates(self):
        """Fetch every DWDMModuleClntIntf state object via the shared getObjects helper."""
        return self.getObjects('DWDMModuleClntIntf', self.stateUrlBase)
def getRouteStatState(self,
Vrf):
obj = {
'Vrf' : Vrf,
}
reqUrl = self.stateUrlBase + 'RouteStat'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getRouteStatStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'RouteStat'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllRouteStatStates(self):
        """Fetch every RouteStat state object via the shared getObjects helper."""
        return self.getObjects('RouteStat', self.stateUrlBase)
def getRouteDistanceState(self,
Protocol):
obj = {
'Protocol' : Protocol,
}
reqUrl = self.stateUrlBase + 'RouteDistance'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getRouteDistanceStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'RouteDistance'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllRouteDistanceStates(self):
        """Fetch every RouteDistance state object via the shared getObjects helper."""
        return self.getObjects('RouteDistance', self.stateUrlBase)
"""
.. automethod :: createLogicalIntf(self,
:param string Name : Name of logical interface Name of logical interface
:param string Type : Type of logical interface (e.x. loopback) Type of logical interface (e.x. loopback)
"""
def createLogicalIntf(self,
Name,
Type='Loopback'):
obj = {
'Name' : Name,
'Type' : Type,
}
reqUrl = self.cfgUrlBase+'LogicalIntf'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteLogicalIntf(self,
Name):
obj = {
'Name' : Name,
}
reqUrl = self.cfgUrlBase+'LogicalIntf'
if self.authenticate == True:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteLogicalIntfById(self, objectId ):
reqUrl = self.cfgUrlBase+'LogicalIntf'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
else:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
return r
def updateLogicalIntf(self,
Name,
Type = None):
obj = {}
if Name != None :
obj['Name'] = Name
if Type != None :
obj['Type'] = Type
reqUrl = self.cfgUrlBase+'LogicalIntf'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateLogicalIntfById(self,
objectId,
Type = None):
obj = {}
if Type != None:
obj['Type'] = Type
reqUrl = self.cfgUrlBase+'LogicalIntf'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdateLogicalIntf(self,
Name,
op,
path,
value,):
obj = {}
obj['Name'] = Name
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'LogicalIntf'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getLogicalIntf(self,
Name):
obj = {
'Name' : Name,
}
reqUrl = self.cfgUrlBase + 'LogicalIntf'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getLogicalIntfById(self, objectId ):
reqUrl = self.cfgUrlBase + 'LogicalIntf'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllLogicalIntfs(self):
        """Fetch every LogicalIntf config object via the shared getObjects helper."""
        return self.getObjects('LogicalIntf', self.cfgUrlBase)
def getBGPv6NeighborState(self,
IntfRef,
NeighborAddress):
obj = {
'IntfRef' : IntfRef,
'NeighborAddress' : NeighborAddress,
}
reqUrl = self.stateUrlBase + 'BGPv6Neighbor'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getBGPv6NeighborStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'BGPv6Neighbor'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllBGPv6NeighborStates(self):
        """Fetch every BGPv6Neighbor state object via the shared getObjects helper."""
        return self.getObjects('BGPv6Neighbor', self.stateUrlBase)
def updateLacpGlobal(self,
Vrf,
AdminState = None):
obj = {}
if Vrf != None :
obj['Vrf'] = Vrf
if AdminState != None :
obj['AdminState'] = AdminState
reqUrl = self.cfgUrlBase+'LacpGlobal'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateLacpGlobalById(self,
objectId,
AdminState = None):
obj = {}
if AdminState != None:
obj['AdminState'] = AdminState
reqUrl = self.cfgUrlBase+'LacpGlobal'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdateLacpGlobal(self,
Vrf,
op,
path,
value,):
obj = {}
obj['Vrf'] = Vrf
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'LacpGlobal'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getLacpGlobal(self,
Vrf):
obj = {
'Vrf' : Vrf,
}
reqUrl = self.cfgUrlBase + 'LacpGlobal'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getLacpGlobalById(self, objectId ):
reqUrl = self.cfgUrlBase + 'LacpGlobal'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllLacpGlobals(self):
        """Fetch every LacpGlobal config object via the shared getObjects helper."""
        return self.getObjects('LacpGlobal', self.cfgUrlBase)
"""
.. automethod :: executeForceApplyConfig(self,
"""
def executeForceApplyConfig(self):
obj = {
}
reqUrl = self.actionUrlBase+'ForceApplyConfig'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers)
return r
def getMacTableEntryState(self,
MacAddr):
obj = {
'MacAddr' : MacAddr,
}
reqUrl = self.stateUrlBase + 'MacTableEntry'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getMacTableEntryStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'MacTableEntry'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllMacTableEntryStates(self):
        """Fetch every MacTableEntry state object via the shared getObjects helper."""
        return self.getObjects('MacTableEntry', self.stateUrlBase)
def getFanSensorPMDataState(self,
Class,
Name):
obj = {
'Class' : Class,
'Name' : Name,
}
reqUrl = self.stateUrlBase + 'FanSensorPMData'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getFanSensorPMDataStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'FanSensorPMData'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllFanSensorPMDataStates(self):
        """Fetch every FanSensorPMData state object via the shared getObjects helper."""
        return self.getObjects('FanSensorPMData', self.stateUrlBase)
def getOspfNbrEntryState(self,
NbrIpAddr,
NbrAddressLessIndex):
obj = {
'NbrIpAddr' : NbrIpAddr,
'NbrAddressLessIndex' : int(NbrAddressLessIndex),
}
reqUrl = self.stateUrlBase + 'OspfNbrEntry'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getOspfNbrEntryStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'OspfNbrEntry'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllOspfNbrEntryStates(self):
        """Fetch every OspfNbrEntry state object via the shared getObjects helper."""
        return self.getObjects('OspfNbrEntry', self.stateUrlBase)
def updateSystemParam(self,
Vrf,
MgmtIp = None,
Hostname = None,
SwitchMac = None,
SwVersion = None,
Description = None):
obj = {}
if Vrf != None :
obj['Vrf'] = Vrf
if MgmtIp != None :
obj['MgmtIp'] = MgmtIp
if Hostname != None :
obj['Hostname'] = Hostname
if SwitchMac != None :
obj['SwitchMac'] = SwitchMac
if SwVersion != None :
obj['SwVersion'] = SwVersion
if Description != None :
obj['Description'] = Description
reqUrl = self.cfgUrlBase+'SystemParam'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateSystemParamById(self,
objectId,
MgmtIp = None,
Hostname = None,
SwitchMac = None,
SwVersion = None,
Description = None):
obj = {}
if MgmtIp != None:
obj['MgmtIp'] = MgmtIp
if Hostname != None:
obj['Hostname'] = Hostname
if SwitchMac != None:
obj['SwitchMac'] = SwitchMac
if SwVersion != None:
obj['SwVersion'] = SwVersion
if Description != None:
obj['Description'] = Description
reqUrl = self.cfgUrlBase+'SystemParam'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdateSystemParam(self,
Vrf,
op,
path,
value,):
obj = {}
obj['Vrf'] = Vrf
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'SystemParam'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getSystemParam(self,
Vrf):
obj = {
'Vrf' : Vrf,
}
reqUrl = self.cfgUrlBase + 'SystemParam'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getSystemParamById(self, objectId ):
reqUrl = self.cfgUrlBase + 'SystemParam'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllSystemParams(self):
        """Fetch every SystemParam config object via the shared getObjects helper."""
        return self.getObjects('SystemParam', self.cfgUrlBase)
"""
.. automethod :: executeNdpDeleteByIfName(self,
:param string IfName : All the NDP learned for end host on given L3 interface will be deleted All the NDP learned for end host on given L3 interface will be deleted
"""
def executeNdpDeleteByIfName(self,
IfName):
obj = {
'IfName' : IfName,
}
reqUrl = self.actionUrlBase+'NdpDeleteByIfName'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers)
return r
"""
.. automethod :: createVrrpV4Intf(self,
:param string IntfRef : Interface (name) for which VRRP Version 2 aka VRRP with ipv4 Config needs to be done Interface (name) for which VRRP Version 2 aka VRRP with ipv4 Config needs to be done
:param int32 VRID : Virtual Router's Unique Identifier Virtual Router's Unique Identifier
:param string Address : Virtual Router IPv4 address Virtual Router IPv4 address
:param bool PreemptMode : Controls whether a (starting or restarting) higher-priority Backup router preempts a lower-priority Master router Controls whether a (starting or restarting) higher-priority Backup router preempts a lower-priority Master router
:param string Version : vrrp should be running in which version vrrp should be running in which version
:param int32 Priority : Sending VRRP router's priority for the virtual router Sending VRRP router's priority for the virtual router
:param int32 AdvertisementInterval : Time interval between ADVERTISEMENTS Time interval between ADVERTISEMENTS
:param string AdminState : Vrrp State up or down Vrrp State up or down
:param bool AcceptMode : Controls whether a virtual router in Master state will accept packets addressed to the address owner's IPv4 address as its own if it is not the IPv4 address owner. Controls whether a virtual router in Master state will accept packets addressed to the address owner's IPv4 address as its own if it is not the IPv4 address owner.
"""
def createVrrpV4Intf(self,
IntfRef,
VRID,
Address,
PreemptMode=True,
Version='version3',
Priority=100,
AdvertisementInterval=1,
AdminState='DOWN',
AcceptMode=False):
obj = {
'IntfRef' : IntfRef,
'VRID' : int(VRID),
'Address' : Address,
'PreemptMode' : True if PreemptMode else False,
'Version' : Version,
'Priority' : int(Priority),
'AdvertisementInterval' : int(AdvertisementInterval),
'AdminState' : AdminState,
'AcceptMode' : True if AcceptMode else False,
}
reqUrl = self.cfgUrlBase+'VrrpV4Intf'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteVrrpV4Intf(self,
IntfRef,
VRID):
obj = {
'IntfRef' : IntfRef,
'VRID' : VRID,
}
reqUrl = self.cfgUrlBase+'VrrpV4Intf'
if self.authenticate == True:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteVrrpV4IntfById(self, objectId ):
reqUrl = self.cfgUrlBase+'VrrpV4Intf'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
else:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
return r
def updateVrrpV4Intf(self,
IntfRef,
VRID,
Address = None,
PreemptMode = None,
Version = None,
Priority = None,
AdvertisementInterval = None,
AdminState = None,
AcceptMode = None):
obj = {}
if IntfRef != None :
obj['IntfRef'] = IntfRef
if VRID != None :
obj['VRID'] = int(VRID)
if Address != None :
obj['Address'] = Address
if PreemptMode != None :
obj['PreemptMode'] = True if PreemptMode else False
if Version != None :
obj['Version'] = Version
if Priority != None :
obj['Priority'] = int(Priority)
if AdvertisementInterval != None :
obj['AdvertisementInterval'] = int(AdvertisementInterval)
if AdminState != None :
obj['AdminState'] = AdminState
if AcceptMode != None :
obj['AcceptMode'] = True if AcceptMode else False
reqUrl = self.cfgUrlBase+'VrrpV4Intf'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateVrrpV4IntfById(self,
objectId,
Address = None,
PreemptMode = None,
Version = None,
Priority = None,
AdvertisementInterval = None,
AdminState = None,
AcceptMode = None):
obj = {}
if Address != None:
obj['Address'] = Address
if PreemptMode != None:
obj['PreemptMode'] = PreemptMode
if Version != None:
obj['Version'] = Version
if Priority != None:
obj['Priority'] = Priority
if AdvertisementInterval != None:
obj['AdvertisementInterval'] = AdvertisementInterval
if AdminState != None:
obj['AdminState'] = AdminState
if AcceptMode != None:
obj['AcceptMode'] = AcceptMode
reqUrl = self.cfgUrlBase+'VrrpV4Intf'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdateVrrpV4Intf(self,
IntfRef,
VRID,
op,
path,
value,):
obj = {}
obj['IntfRef'] = IntfRef
obj['VRID'] = VRID
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'VrrpV4Intf'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getVrrpV4Intf(self,
IntfRef,
VRID):
obj = {
'IntfRef' : IntfRef,
'VRID' : int(VRID),
}
reqUrl = self.cfgUrlBase + 'VrrpV4Intf'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getVrrpV4IntfById(self, objectId ):
reqUrl = self.cfgUrlBase + 'VrrpV4Intf'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllVrrpV4Intfs(self):
        """Fetch every VrrpV4Intf config object via the shared getObjects helper."""
        return self.getObjects('VrrpV4Intf', self.cfgUrlBase)
"""
.. automethod :: createAclIpv6Filter(self,
:param string FilterName : AClIpv6 filter name . AClIpv6 filter name .
:param string SourceIpv6 : Source IPv6 address Source IPv6 address
:param int32 L4MinPort : Min port when l4 port is specified as range Min port when l4 port is specified as range
:param int32 L4DstPort : TCP/UDP destionation port TCP/UDP destionation port
:param string DestIpv6 : Destination IPv6 address Destination IPv6 address
:param string Proto : Protocol type TCP/UDP/ICMPv4/ICMPv6 Protocol type TCP/UDP/ICMPv4/ICMPv6
:param int32 L4SrcPort : TCP/UDP source port TCP/UDP source port
:param string DstIntf : Dest Intf(used for mlag) Dest Intf(used for mlag)
:param string SrcIntf : Source Intf(used for mlag) Source Intf(used for mlag)
:param string SourceMaskv6 : Network mask for source IPv6 Network mask for source IPv6
:param string DestMaskv6 : Network mark for dest IPv6 Network mark for dest IPv6
:param int32 L4MaxPort : Max port when l4 port is specified as range Max port when l4 port is specified as range
:param string L4PortMatch : match condition can be EQ(equal) match condition can be EQ(equal)
"""
def createAclIpv6Filter(self,
FilterName,
SourceIpv6='',
L4MinPort=0,
L4DstPort=0,
DestIpv6='',
Proto='',
L4SrcPort=0,
DstIntf='',
SrcIntf='',
SourceMaskv6='',
DestMaskv6='',
L4MaxPort=0,
L4PortMatch='NA'):
obj = {
'FilterName' : FilterName,
'SourceIpv6' : SourceIpv6,
'L4MinPort' : int(L4MinPort),
'L4DstPort' : int(L4DstPort),
'DestIpv6' : DestIpv6,
'Proto' : Proto,
'L4SrcPort' : int(L4SrcPort),
'DstIntf' : DstIntf,
'SrcIntf' : SrcIntf,
'SourceMaskv6' : SourceMaskv6,
'DestMaskv6' : DestMaskv6,
'L4MaxPort' : int(L4MaxPort),
'L4PortMatch' : L4PortMatch,
}
reqUrl = self.cfgUrlBase+'AclIpv6Filter'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteAclIpv6Filter(self,
FilterName):
obj = {
'FilterName' : FilterName,
}
reqUrl = self.cfgUrlBase+'AclIpv6Filter'
if self.authenticate == True:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteAclIpv6FilterById(self, objectId ):
reqUrl = self.cfgUrlBase+'AclIpv6Filter'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
else:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
return r
def updateAclIpv6Filter(self,
FilterName,
SourceIpv6 = None,
L4MinPort = None,
L4DstPort = None,
DestIpv6 = None,
Proto = None,
L4SrcPort = None,
DstIntf = None,
SrcIntf = None,
SourceMaskv6 = None,
DestMaskv6 = None,
L4MaxPort = None,
L4PortMatch = None):
obj = {}
if FilterName != None :
obj['FilterName'] = FilterName
if SourceIpv6 != None :
obj['SourceIpv6'] = SourceIpv6
if L4MinPort != None :
obj['L4MinPort'] = int(L4MinPort)
if L4DstPort != None :
obj['L4DstPort'] = int(L4DstPort)
if DestIpv6 != None :
obj['DestIpv6'] = DestIpv6
if Proto != None :
obj['Proto'] = Proto
if L4SrcPort != None :
obj['L4SrcPort'] = int(L4SrcPort)
if DstIntf != None :
obj['DstIntf'] = DstIntf
if SrcIntf != None :
obj['SrcIntf'] = SrcIntf
if SourceMaskv6 != None :
obj['SourceMaskv6'] = SourceMaskv6
if DestMaskv6 != None :
obj['DestMaskv6'] = DestMaskv6
if L4MaxPort != None :
obj['L4MaxPort'] = int(L4MaxPort)
if L4PortMatch != None :
obj['L4PortMatch'] = L4PortMatch
reqUrl = self.cfgUrlBase+'AclIpv6Filter'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateAclIpv6FilterById(self,
objectId,
SourceIpv6 = None,
L4MinPort = None,
L4DstPort = None,
DestIpv6 = None,
Proto = None,
L4SrcPort = None,
DstIntf = None,
SrcIntf = None,
SourceMaskv6 = None,
DestMaskv6 = None,
L4MaxPort = None,
L4PortMatch = None):
obj = {}
if SourceIpv6 != None:
obj['SourceIpv6'] = SourceIpv6
if L4MinPort != None:
obj['L4MinPort'] = L4MinPort
if L4DstPort != None:
obj['L4DstPort'] = L4DstPort
if DestIpv6 != None:
obj['DestIpv6'] = DestIpv6
if Proto != None:
obj['Proto'] = Proto
if L4SrcPort != None:
obj['L4SrcPort'] = L4SrcPort
if DstIntf != None:
obj['DstIntf'] = DstIntf
if SrcIntf != None:
obj['SrcIntf'] = SrcIntf
if SourceMaskv6 != None:
obj['SourceMaskv6'] = SourceMaskv6
if DestMaskv6 != None:
obj['DestMaskv6'] = DestMaskv6
if L4MaxPort != None:
obj['L4MaxPort'] = L4MaxPort
if L4PortMatch != None:
obj['L4PortMatch'] = L4PortMatch
reqUrl = self.cfgUrlBase+'AclIpv6Filter'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdateAclIpv6Filter(self,
FilterName,
op,
path,
value,):
obj = {}
obj['FilterName'] = FilterName
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'AclIpv6Filter'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getAclIpv6Filter(self,
FilterName):
obj = {
'FilterName' : FilterName,
}
reqUrl = self.cfgUrlBase + 'AclIpv6Filter'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getAclIpv6FilterById(self, objectId ):
reqUrl = self.cfgUrlBase + 'AclIpv6Filter'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllAclIpv6Filters(self):
        """Fetch every AclIpv6Filter config object via the shared getObjects helper."""
        return self.getObjects('AclIpv6Filter', self.cfgUrlBase)
"""
.. automethod :: executeResetBGPv6NeighborByInterface(self,
:param string IntfRef : Interface of the BGP IPv6 neighbor to restart Interface of the BGP IPv6 neighbor to restart
"""
def executeResetBGPv6NeighborByInterface(self,
IntfRef):
obj = {
'IntfRef' : IntfRef,
}
reqUrl = self.actionUrlBase+'ResetBGPv6NeighborByInterface'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers)
return r
"""
.. automethod :: createVoltageSensor(self,
:param string Name : Voltage Sensor Name Voltage Sensor Name
:param float64 HigherAlarmThreshold : Higher Alarm Threshold for TCA Higher Alarm Threshold for TCA
:param float64 HigherWarningThreshold : Higher Warning Threshold for TCA Higher Warning Threshold for TCA
:param float64 LowerWarningThreshold : Lower Warning Threshold for TCA Lower Warning Threshold for TCA
:param float64 LowerAlarmThreshold : Lower Alarm Threshold for TCA Lower Alarm Threshold for TCA
:param string PMClassCAdminState : PM Class-C Admin State PM Class-C Admin State
:param string PMClassAAdminState : PM Class-A Admin State PM Class-A Admin State
:param string AdminState : Enable/Disable Enable/Disable
:param string PMClassBAdminState : PM Class-B Admin State PM Class-B Admin State
"""
def createVoltageSensor(self,
Name,
HigherAlarmThreshold,
HigherWarningThreshold,
LowerWarningThreshold,
LowerAlarmThreshold,
PMClassCAdminState='Enable',
PMClassAAdminState='Enable',
AdminState='Enable',
PMClassBAdminState='Enable'):
obj = {
'Name' : Name,
'HigherAlarmThreshold' : HigherAlarmThreshold,
'HigherWarningThreshold' : HigherWarningThreshold,
'LowerWarningThreshold' : LowerWarningThreshold,
'LowerAlarmThreshold' : LowerAlarmThreshold,
'PMClassCAdminState' : PMClassCAdminState,
'PMClassAAdminState' : PMClassAAdminState,
'AdminState' : AdminState,
'PMClassBAdminState' : PMClassBAdminState,
}
reqUrl = self.cfgUrlBase+'VoltageSensor'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteVoltageSensor(self,
Name):
obj = {
'Name' : Name,
}
reqUrl = self.cfgUrlBase+'VoltageSensor'
if self.authenticate == True:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteVoltageSensorById(self, objectId ):
reqUrl = self.cfgUrlBase+'VoltageSensor'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
else:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
return r
def updateVoltageSensor(self,
Name,
HigherAlarmThreshold = None,
HigherWarningThreshold = None,
LowerWarningThreshold = None,
LowerAlarmThreshold = None,
PMClassCAdminState = None,
PMClassAAdminState = None,
AdminState = None,
PMClassBAdminState = None):
obj = {}
if Name != None :
obj['Name'] = Name
if HigherAlarmThreshold != None :
obj['HigherAlarmThreshold'] = HigherAlarmThreshold
if HigherWarningThreshold != None :
obj['HigherWarningThreshold'] = HigherWarningThreshold
if LowerWarningThreshold != None :
obj['LowerWarningThreshold'] = LowerWarningThreshold
if LowerAlarmThreshold != None :
obj['LowerAlarmThreshold'] = LowerAlarmThreshold
if PMClassCAdminState != None :
obj['PMClassCAdminState'] = PMClassCAdminState
if PMClassAAdminState != None :
obj['PMClassAAdminState'] = PMClassAAdminState
if AdminState != None :
obj['AdminState'] = AdminState
if PMClassBAdminState != None :
obj['PMClassBAdminState'] = PMClassBAdminState
reqUrl = self.cfgUrlBase+'VoltageSensor'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateVoltageSensorById(self,
objectId,
HigherAlarmThreshold = None,
HigherWarningThreshold = None,
LowerWarningThreshold = None,
LowerAlarmThreshold = None,
PMClassCAdminState = None,
PMClassAAdminState = None,
AdminState = None,
PMClassBAdminState = None):
obj = {}
if HigherAlarmThreshold != None:
obj['HigherAlarmThreshold'] = HigherAlarmThreshold
if HigherWarningThreshold != None:
obj['HigherWarningThreshold'] = HigherWarningThreshold
if LowerWarningThreshold != None:
obj['LowerWarningThreshold'] = LowerWarningThreshold
if LowerAlarmThreshold != None:
obj['LowerAlarmThreshold'] = LowerAlarmThreshold
if PMClassCAdminState != None:
obj['PMClassCAdminState'] = PMClassCAdminState
if PMClassAAdminState != None:
obj['PMClassAAdminState'] = PMClassAAdminState
if AdminState != None:
obj['AdminState'] = AdminState
if PMClassBAdminState != None:
obj['PMClassBAdminState'] = PMClassBAdminState
reqUrl = self.cfgUrlBase+'VoltageSensor'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdateVoltageSensor(self,
Name,
op,
path,
value,):
obj = {}
obj['Name'] = Name
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'VoltageSensor'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getVoltageSensor(self,
Name):
obj = {
'Name' : Name,
}
reqUrl = self.cfgUrlBase + 'VoltageSensor'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getVoltageSensorById(self, objectId ):
reqUrl = self.cfgUrlBase + 'VoltageSensor'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllVoltageSensors(self):
        """Return every VoltageSensor config object via the generic getObjects helper."""
        return self.getObjects('VoltageSensor', self.cfgUrlBase)
def getAlarmState(self,
EventId,
EventName,
SrcObjName,
OwnerName,
OwnerId):
obj = {
'EventId' : int(EventId),
'EventName' : EventName,
'SrcObjName' : SrcObjName,
'OwnerName' : OwnerName,
'OwnerId' : int(OwnerId),
}
reqUrl = self.stateUrlBase + 'Alarm'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getAlarmStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'Alarm'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllAlarmStates(self):
        """Return every Alarm state object via the generic getObjects helper."""
        return self.getObjects('Alarm', self.stateUrlBase)
def getQsfpPMDataState(self,
Resource,
Class,
QsfpId):
obj = {
'Resource' : Resource,
'Class' : Class,
'QsfpId' : int(QsfpId),
}
reqUrl = self.stateUrlBase + 'QsfpPMData'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getQsfpPMDataStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'QsfpPMData'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllQsfpPMDataStates(self):
        """Return every QsfpPMData state object via the generic getObjects helper."""
        return self.getObjects('QsfpPMData', self.stateUrlBase)
def getLogicalIntfState(self,
Name):
obj = {
'Name' : Name,
}
reqUrl = self.stateUrlBase + 'LogicalIntf'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getLogicalIntfStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'LogicalIntf'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllLogicalIntfStates(self):
        """Return every LogicalIntf state object via the generic getObjects helper."""
        return self.getObjects('LogicalIntf', self.stateUrlBase)
"""
.. automethod :: createAsicGlobalPM(self,
:param string Resource : Resource identifier Resource identifier
:param uint8 ModuleId : Module identifier Module identifier
:param bool PMClassBEnable : Enable/Disable control for CLASS-B PM Enable/Disable control for CLASS-B PM
:param float64 HighWarnThreshold : High warning threshold value for this PM High warning threshold value for this PM
:param float64 LowAlarmThreshold : Low alarm threshold value for this PM Low alarm threshold value for this PM
:param bool PMClassCEnable : Enable/Disable control for CLASS-C PM Enable/Disable control for CLASS-C PM
:param bool PMClassAEnable : Enable/Disable control for CLASS-A PM Enable/Disable control for CLASS-A PM
:param float64 LowWarnThreshold : Low warning threshold value for this PM Low warning threshold value for this PM
:param float64 HighAlarmThreshold : High alarm threshold value for this PM High alarm threshold value for this PM
"""
def createAsicGlobalPM(self,
Resource,
PMClassBEnable=True,
HighWarnThreshold='100000',
LowAlarmThreshold='-100000',
PMClassCEnable=True,
PMClassAEnable=True,
LowWarnThreshold='-100000',
HighAlarmThreshold='100000'):
obj = {
'Resource' : Resource,
'ModuleId' : int(0),
'PMClassBEnable' : True if PMClassBEnable else False,
'HighWarnThreshold' : HighWarnThreshold,
'LowAlarmThreshold' : LowAlarmThreshold,
'PMClassCEnable' : True if PMClassCEnable else False,
'PMClassAEnable' : True if PMClassAEnable else False,
'LowWarnThreshold' : LowWarnThreshold,
'HighAlarmThreshold' : HighAlarmThreshold,
}
reqUrl = self.cfgUrlBase+'AsicGlobalPM'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteAsicGlobalPM(self,
Resource,
ModuleId):
obj = {
'Resource' : Resource,
'ModuleId' : ModuleId,
}
reqUrl = self.cfgUrlBase+'AsicGlobalPM'
if self.authenticate == True:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteAsicGlobalPMById(self, objectId ):
reqUrl = self.cfgUrlBase+'AsicGlobalPM'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
else:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
return r
def updateAsicGlobalPM(self,
Resource,
ModuleId,
PMClassBEnable = None,
HighWarnThreshold = None,
LowAlarmThreshold = None,
PMClassCEnable = None,
PMClassAEnable = None,
LowWarnThreshold = None,
HighAlarmThreshold = None):
obj = {}
if Resource != None :
obj['Resource'] = Resource
if ModuleId != None :
obj['ModuleId'] = int(ModuleId)
if PMClassBEnable != None :
obj['PMClassBEnable'] = True if PMClassBEnable else False
if HighWarnThreshold != None :
obj['HighWarnThreshold'] = HighWarnThreshold
if LowAlarmThreshold != None :
obj['LowAlarmThreshold'] = LowAlarmThreshold
if PMClassCEnable != None :
obj['PMClassCEnable'] = True if PMClassCEnable else False
if PMClassAEnable != None :
obj['PMClassAEnable'] = True if PMClassAEnable else False
if LowWarnThreshold != None :
obj['LowWarnThreshold'] = LowWarnThreshold
if HighAlarmThreshold != None :
obj['HighAlarmThreshold'] = HighAlarmThreshold
reqUrl = self.cfgUrlBase+'AsicGlobalPM'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateAsicGlobalPMById(self,
objectId,
PMClassBEnable = None,
HighWarnThreshold = None,
LowAlarmThreshold = None,
PMClassCEnable = None,
PMClassAEnable = None,
LowWarnThreshold = None,
HighAlarmThreshold = None):
obj = {}
if PMClassBEnable != None:
obj['PMClassBEnable'] = PMClassBEnable
if HighWarnThreshold != None:
obj['HighWarnThreshold'] = HighWarnThreshold
if LowAlarmThreshold != None:
obj['LowAlarmThreshold'] = LowAlarmThreshold
if PMClassCEnable != None:
obj['PMClassCEnable'] = PMClassCEnable
if PMClassAEnable != None:
obj['PMClassAEnable'] = PMClassAEnable
if LowWarnThreshold != None:
obj['LowWarnThreshold'] = LowWarnThreshold
if HighAlarmThreshold != None:
obj['HighAlarmThreshold'] = HighAlarmThreshold
reqUrl = self.cfgUrlBase+'AsicGlobalPM'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdateAsicGlobalPM(self,
Resource,
ModuleId,
op,
path,
value,):
obj = {}
obj['Resource'] = Resource
obj['ModuleId'] = ModuleId
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'AsicGlobalPM'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getAsicGlobalPM(self,
Resource,
ModuleId):
obj = {
'Resource' : Resource,
'ModuleId' : int(ModuleId),
}
reqUrl = self.cfgUrlBase + 'AsicGlobalPM'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getAsicGlobalPMById(self, objectId ):
reqUrl = self.cfgUrlBase + 'AsicGlobalPM'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllAsicGlobalPMs(self):
        """Return every AsicGlobalPM config object via the generic getObjects helper."""
        return self.getObjects('AsicGlobalPM', self.cfgUrlBase)
def getIPv4RouteHwState(self,
DestinationNw):
obj = {
'DestinationNw' : DestinationNw,
}
reqUrl = self.stateUrlBase + 'IPv4RouteHw'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getIPv4RouteHwStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'IPv4RouteHw'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllIPv4RouteHwStates(self):
        """Return every IPv4RouteHw state object via the generic getObjects helper."""
        return self.getObjects('IPv4RouteHw', self.stateUrlBase)
def updateArpGlobal(self,
Vrf,
Timeout = None):
obj = {}
if Vrf != None :
obj['Vrf'] = Vrf
if Timeout != None :
obj['Timeout'] = int(Timeout)
reqUrl = self.cfgUrlBase+'ArpGlobal'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateArpGlobalById(self,
objectId,
Timeout = None):
obj = {}
if Timeout != None:
obj['Timeout'] = Timeout
reqUrl = self.cfgUrlBase+'ArpGlobal'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdateArpGlobal(self,
Vrf,
op,
path,
value,):
obj = {}
obj['Vrf'] = Vrf
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'ArpGlobal'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getArpGlobal(self,
Vrf):
obj = {
'Vrf' : Vrf,
}
reqUrl = self.cfgUrlBase + 'ArpGlobal'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getArpGlobalById(self, objectId ):
reqUrl = self.cfgUrlBase + 'ArpGlobal'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllArpGlobals(self):
        """Return every ArpGlobal config object via the generic getObjects helper."""
        return self.getObjects('ArpGlobal', self.cfgUrlBase)
"""
.. automethod :: createPolicyCommunitySet(self,
:param string Name : Policy Community List name. Policy Community List name.
:param string CommunityList : List of policy communities part of this community list. List of policy communities part of this community list.
"""
def createPolicyCommunitySet(self,
Name,
CommunityList):
obj = {
'Name' : Name,
'CommunityList' : CommunityList,
}
reqUrl = self.cfgUrlBase+'PolicyCommunitySet'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deletePolicyCommunitySet(self,
Name):
obj = {
'Name' : Name,
}
reqUrl = self.cfgUrlBase+'PolicyCommunitySet'
if self.authenticate == True:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deletePolicyCommunitySetById(self, objectId ):
reqUrl = self.cfgUrlBase+'PolicyCommunitySet'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
else:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
return r
def updatePolicyCommunitySet(self,
Name,
CommunityList = None):
obj = {}
if Name != None :
obj['Name'] = Name
if CommunityList != None :
obj['CommunityList'] = CommunityList
reqUrl = self.cfgUrlBase+'PolicyCommunitySet'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updatePolicyCommunitySetById(self,
objectId,
CommunityList = None):
obj = {}
if CommunityList != None:
obj['CommunityList'] = CommunityList
reqUrl = self.cfgUrlBase+'PolicyCommunitySet'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdatePolicyCommunitySet(self,
Name,
op,
path,
value,):
obj = {}
obj['Name'] = Name
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'PolicyCommunitySet'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getPolicyCommunitySet(self,
Name):
obj = {
'Name' : Name,
}
reqUrl = self.cfgUrlBase + 'PolicyCommunitySet'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getPolicyCommunitySetById(self, objectId ):
reqUrl = self.cfgUrlBase + 'PolicyCommunitySet'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllPolicyCommunitySets(self):
        """Return every PolicyCommunitySet config object via the generic getObjects helper."""
        return self.getObjects('PolicyCommunitySet', self.cfgUrlBase)
"""
.. automethod :: createPolicyExtendedCommunitySet(self,
:param string Name : Policy Extended Community List name. Policy Extended Community List name.
:param PolicyExtendedCommunity ExtendedCommunityList : List of policy communities part of this community list. List of policy communities part of this community list.
"""
def createPolicyExtendedCommunitySet(self,
Name,
ExtendedCommunityList):
obj = {
'Name' : Name,
'ExtendedCommunityList' : ExtendedCommunityList,
}
reqUrl = self.cfgUrlBase+'PolicyExtendedCommunitySet'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deletePolicyExtendedCommunitySet(self,
Name):
obj = {
'Name' : Name,
}
reqUrl = self.cfgUrlBase+'PolicyExtendedCommunitySet'
if self.authenticate == True:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deletePolicyExtendedCommunitySetById(self, objectId ):
reqUrl = self.cfgUrlBase+'PolicyExtendedCommunitySet'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
else:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
return r
def updatePolicyExtendedCommunitySet(self,
Name,
ExtendedCommunityList = None):
obj = {}
if Name != None :
obj['Name'] = Name
if ExtendedCommunityList != None :
obj['ExtendedCommunityList'] = ExtendedCommunityList
reqUrl = self.cfgUrlBase+'PolicyExtendedCommunitySet'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updatePolicyExtendedCommunitySetById(self,
objectId,
ExtendedCommunityList = None):
obj = {}
if ExtendedCommunityList != None:
obj['ExtendedCommunityList'] = ExtendedCommunityList
reqUrl = self.cfgUrlBase+'PolicyExtendedCommunitySet'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdatePolicyExtendedCommunitySet(self,
Name,
op,
path,
value,):
obj = {}
obj['Name'] = Name
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'PolicyExtendedCommunitySet'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getPolicyExtendedCommunitySet(self,
Name):
obj = {
'Name' : Name,
}
reqUrl = self.cfgUrlBase + 'PolicyExtendedCommunitySet'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getPolicyExtendedCommunitySetById(self, objectId ):
reqUrl = self.cfgUrlBase + 'PolicyExtendedCommunitySet'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllPolicyExtendedCommunitySets(self):
        """Return every PolicyExtendedCommunitySet config object via the generic getObjects helper."""
        return self.getObjects('PolicyExtendedCommunitySet', self.cfgUrlBase)
"""
.. automethod :: createBGPv4PeerGroup(self,
:param string Name : Name of the BGP peer group Name of the BGP peer group
:param bool RouteReflectorClient : Set/Clear BGP neighbor as a route reflector client Set/Clear BGP neighbor as a route reflector client
:param uint8 MultiHopTTL : TTL for multi hop BGP neighbor TTL for multi hop BGP neighbor
:param string LocalAS : Local AS of the BGP neighbor Local AS of the BGP neighbor
:param uint32 KeepaliveTime : Keep alive time for the BGP neighbor Keep alive time for the BGP neighbor
:param bool AddPathsRx : Receive additional paths from BGP neighbor Receive additional paths from BGP neighbor
:param string UpdateSource : Source IP to connect to the BGP neighbor Source IP to connect to the BGP neighbor
:param uint8 MaxPrefixesRestartTimer : Time to wait before we start BGP peer session when we receive max prefixes Time to wait before we start BGP peer session when we receive max prefixes
:param string Description : Description of the BGP neighbor Description of the BGP neighbor
:param bool MultiHopEnable : Enable/Disable multi hop for BGP neighbor Enable/Disable multi hop for BGP neighbor
:param string AuthPassword : Password to connect to the BGP neighbor Password to connect to the BGP neighbor
:param uint32 RouteReflectorClusterId : Cluster Id of the internal BGP neighbor route reflector client Cluster Id of the internal BGP neighbor route reflector client
:param string AdjRIBOutFilter : Policy that is applied for Adj-RIB-Out prefix filtering Policy that is applied for Adj-RIB-Out prefix filtering
:param bool MaxPrefixesDisconnect : Disconnect the BGP peer session when we receive the max prefixes from the neighbor Disconnect the BGP peer session when we receive the max prefixes from the neighbor
:param string PeerAS : Peer AS of the BGP neighbor Peer AS of the BGP neighbor
:param uint8 AddPathsMaxTx : Max number of additional paths that can be transmitted to BGP neighbor Max number of additional paths that can be transmitted to BGP neighbor
:param string AdjRIBInFilter : Policy that is applied for Adj-RIB-In prefix filtering Policy that is applied for Adj-RIB-In prefix filtering
:param uint32 MaxPrefixes : Maximum number of prefixes that can be received from the BGP neighbor Maximum number of prefixes that can be received from the BGP neighbor
:param uint8 MaxPrefixesThresholdPct : The percentage of maximum prefixes before we start logging The percentage of maximum prefixes before we start logging
:param bool NextHopSelf : Use neighbor source IP as the next hop for IBGP neighbors Use neighbor source IP as the next hop for IBGP neighbors
:param uint32 HoldTime : Hold time for the BGP neighbor Hold time for the BGP neighbor
:param uint32 ConnectRetryTime : Connect retry time to connect to BGP neighbor after disconnect Connect retry time to connect to BGP neighbor after disconnect
"""
def createBGPv4PeerGroup(self,
Name,
RouteReflectorClient=False,
MultiHopTTL=0,
LocalAS='',
KeepaliveTime=0,
AddPathsRx=False,
UpdateSource='',
MaxPrefixesRestartTimer=0,
Description='',
MultiHopEnable=False,
AuthPassword='',
RouteReflectorClusterId=0,
AdjRIBOutFilter='',
MaxPrefixesDisconnect=False,
PeerAS='',
AddPathsMaxTx=0,
AdjRIBInFilter='',
MaxPrefixes=0,
MaxPrefixesThresholdPct=80,
NextHopSelf=False,
HoldTime=0,
ConnectRetryTime=0):
obj = {
'Name' : Name,
'RouteReflectorClient' : True if RouteReflectorClient else False,
'MultiHopTTL' : int(MultiHopTTL),
'LocalAS' : LocalAS,
'KeepaliveTime' : int(KeepaliveTime),
'AddPathsRx' : True if AddPathsRx else False,
'UpdateSource' : UpdateSource,
'MaxPrefixesRestartTimer' : int(MaxPrefixesRestartTimer),
'Description' : Description,
'MultiHopEnable' : True if MultiHopEnable else False,
'AuthPassword' : AuthPassword,
'RouteReflectorClusterId' : int(RouteReflectorClusterId),
'AdjRIBOutFilter' : AdjRIBOutFilter,
'MaxPrefixesDisconnect' : True if MaxPrefixesDisconnect else False,
'PeerAS' : PeerAS,
'AddPathsMaxTx' : int(AddPathsMaxTx),
'AdjRIBInFilter' : AdjRIBInFilter,
'MaxPrefixes' : int(MaxPrefixes),
'MaxPrefixesThresholdPct' : int(MaxPrefixesThresholdPct),
'NextHopSelf' : True if NextHopSelf else False,
'HoldTime' : int(HoldTime),
'ConnectRetryTime' : int(ConnectRetryTime),
}
reqUrl = self.cfgUrlBase+'BGPv4PeerGroup'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteBGPv4PeerGroup(self,
Name):
obj = {
'Name' : Name,
}
reqUrl = self.cfgUrlBase+'BGPv4PeerGroup'
if self.authenticate == True:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteBGPv4PeerGroupById(self, objectId ):
reqUrl = self.cfgUrlBase+'BGPv4PeerGroup'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
else:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
return r
def updateBGPv4PeerGroup(self,
Name,
RouteReflectorClient = None,
MultiHopTTL = None,
LocalAS = None,
KeepaliveTime = None,
AddPathsRx = None,
UpdateSource = None,
MaxPrefixesRestartTimer = None,
Description = None,
MultiHopEnable = None,
AuthPassword = None,
RouteReflectorClusterId = None,
AdjRIBOutFilter = None,
MaxPrefixesDisconnect = None,
PeerAS = None,
AddPathsMaxTx = None,
AdjRIBInFilter = None,
MaxPrefixes = None,
MaxPrefixesThresholdPct = None,
NextHopSelf = None,
HoldTime = None,
ConnectRetryTime = None):
obj = {}
if Name != None :
obj['Name'] = Name
if RouteReflectorClient != None :
obj['RouteReflectorClient'] = True if RouteReflectorClient else False
if MultiHopTTL != None :
obj['MultiHopTTL'] = int(MultiHopTTL)
if LocalAS != None :
obj['LocalAS'] = LocalAS
if KeepaliveTime != None :
obj['KeepaliveTime'] = int(KeepaliveTime)
if AddPathsRx != None :
obj['AddPathsRx'] = True if AddPathsRx else False
if UpdateSource != None :
obj['UpdateSource'] = UpdateSource
if MaxPrefixesRestartTimer != None :
obj['MaxPrefixesRestartTimer'] = int(MaxPrefixesRestartTimer)
if Description != None :
obj['Description'] = Description
if MultiHopEnable != None :
obj['MultiHopEnable'] = True if MultiHopEnable else False
if AuthPassword != None :
obj['AuthPassword'] = AuthPassword
if RouteReflectorClusterId != None :
obj['RouteReflectorClusterId'] = int(RouteReflectorClusterId)
if AdjRIBOutFilter != None :
obj['AdjRIBOutFilter'] = AdjRIBOutFilter
if MaxPrefixesDisconnect != None :
obj['MaxPrefixesDisconnect'] = True if MaxPrefixesDisconnect else False
if PeerAS != None :
obj['PeerAS'] = PeerAS
if AddPathsMaxTx != None :
obj['AddPathsMaxTx'] = int(AddPathsMaxTx)
if AdjRIBInFilter != None :
obj['AdjRIBInFilter'] = AdjRIBInFilter
if MaxPrefixes != None :
obj['MaxPrefixes'] = int(MaxPrefixes)
if MaxPrefixesThresholdPct != None :
obj['MaxPrefixesThresholdPct'] = int(MaxPrefixesThresholdPct)
if NextHopSelf != None :
obj['NextHopSelf'] = True if NextHopSelf else False
if HoldTime != None :
obj['HoldTime'] = int(HoldTime)
if ConnectRetryTime != None :
obj['ConnectRetryTime'] = int(ConnectRetryTime)
reqUrl = self.cfgUrlBase+'BGPv4PeerGroup'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateBGPv4PeerGroupById(self,
objectId,
RouteReflectorClient = None,
MultiHopTTL = None,
LocalAS = None,
KeepaliveTime = None,
AddPathsRx = None,
UpdateSource = None,
MaxPrefixesRestartTimer = None,
Description = None,
MultiHopEnable = None,
AuthPassword = None,
RouteReflectorClusterId = None,
AdjRIBOutFilter = None,
MaxPrefixesDisconnect = None,
PeerAS = None,
AddPathsMaxTx = None,
AdjRIBInFilter = None,
MaxPrefixes = None,
MaxPrefixesThresholdPct = None,
NextHopSelf = None,
HoldTime = None,
ConnectRetryTime = None):
obj = {}
if RouteReflectorClient != None:
obj['RouteReflectorClient'] = RouteReflectorClient
if MultiHopTTL != None:
obj['MultiHopTTL'] = MultiHopTTL
if LocalAS != None:
obj['LocalAS'] = LocalAS
if KeepaliveTime != None:
obj['KeepaliveTime'] = KeepaliveTime
if AddPathsRx != None:
obj['AddPathsRx'] = AddPathsRx
if UpdateSource != None:
obj['UpdateSource'] = UpdateSource
if MaxPrefixesRestartTimer != None:
obj['MaxPrefixesRestartTimer'] = MaxPrefixesRestartTimer
if Description != None:
obj['Description'] = Description
if MultiHopEnable != None:
obj['MultiHopEnable'] = MultiHopEnable
if AuthPassword != None:
obj['AuthPassword'] = AuthPassword
if RouteReflectorClusterId != None:
obj['RouteReflectorClusterId'] = RouteReflectorClusterId
if AdjRIBOutFilter != None:
obj['AdjRIBOutFilter'] = AdjRIBOutFilter
if MaxPrefixesDisconnect != None:
obj['MaxPrefixesDisconnect'] = MaxPrefixesDisconnect
if PeerAS != None:
obj['PeerAS'] = PeerAS
if AddPathsMaxTx != None:
obj['AddPathsMaxTx'] = AddPathsMaxTx
if AdjRIBInFilter != None:
obj['AdjRIBInFilter'] = AdjRIBInFilter
if MaxPrefixes != None:
obj['MaxPrefixes'] = MaxPrefixes
if MaxPrefixesThresholdPct != None:
obj['MaxPrefixesThresholdPct'] = MaxPrefixesThresholdPct
if NextHopSelf != None:
obj['NextHopSelf'] = NextHopSelf
if HoldTime != None:
obj['HoldTime'] = HoldTime
if ConnectRetryTime != None:
obj['ConnectRetryTime'] = ConnectRetryTime
reqUrl = self.cfgUrlBase+'BGPv4PeerGroup'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdateBGPv4PeerGroup(self,
Name,
op,
path,
value,):
obj = {}
obj['Name'] = Name
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'BGPv4PeerGroup'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getBGPv4PeerGroup(self,
Name):
obj = {
'Name' : Name,
}
reqUrl = self.cfgUrlBase + 'BGPv4PeerGroup'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getBGPv4PeerGroupById(self, objectId ):
reqUrl = self.cfgUrlBase + 'BGPv4PeerGroup'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllBGPv4PeerGroups(self):
return self.getObjects('BGPv4PeerGroup', self.cfgUrlBase)
def getIPv4RouteState(self,
DestinationNw):
obj = {
'DestinationNw' : DestinationNw,
}
reqUrl = self.stateUrlBase + 'IPv4Route'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getIPv4RouteStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'IPv4Route'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllIPv4RouteStates(self):
return self.getObjects('IPv4Route', self.stateUrlBase)
def getVrrpV6IntfState(self,
IntfRef,
VRID):
obj = {
'IntfRef' : IntfRef,
'VRID' : int(VRID),
}
reqUrl = self.stateUrlBase + 'VrrpV6Intf'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getVrrpV6IntfStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'VrrpV6Intf'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllVrrpV6IntfStates(self):
return self.getObjects('VrrpV6Intf', self.stateUrlBase)
def getQsfpState(self,
QsfpId):
obj = {
'QsfpId' : int(QsfpId),
}
reqUrl = self.stateUrlBase + 'Qsfp'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getQsfpStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'Qsfp'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllQsfpStates(self):
return self.getObjects('Qsfp', self.stateUrlBase)
def getBfdGlobalState(self,
Vrf):
obj = {
'Vrf' : Vrf,
}
reqUrl = self.stateUrlBase + 'BfdGlobal'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getBfdGlobalStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'BfdGlobal'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllBfdGlobalStates(self):
return self.getObjects('BfdGlobal', self.stateUrlBase)
def updateVrrpGlobal(self,
Vrf,
Enable = None):
obj = {}
if Vrf != None :
obj['Vrf'] = Vrf
if Enable != None :
obj['Enable'] = True if Enable else False
reqUrl = self.cfgUrlBase+'VrrpGlobal'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateVrrpGlobalById(self,
objectId,
Enable = None):
obj = {}
if Enable != None:
obj['Enable'] = Enable
reqUrl = self.cfgUrlBase+'VrrpGlobal'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdateVrrpGlobal(self,
Vrf,
op,
path,
value,):
obj = {}
obj['Vrf'] = Vrf
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'VrrpGlobal'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getVrrpGlobal(self,
Vrf):
obj = {
'Vrf' : Vrf,
}
reqUrl = self.cfgUrlBase + 'VrrpGlobal'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getVrrpGlobalById(self, objectId ):
reqUrl = self.cfgUrlBase + 'VrrpGlobal'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllVrrpGlobals(self):
return self.getObjects('VrrpGlobal', self.cfgUrlBase)
"""
.. automethod :: createPolicyASPathSet(self,
:param string Name : Policy ASPath List name. Policy ASPath List name.
:param string ASPathList : List of ASPaths part of this list. List of ASPaths part of this list.
"""
def createPolicyASPathSet(self,
Name,
ASPathList):
obj = {
'Name' : Name,
'ASPathList' : ASPathList,
}
reqUrl = self.cfgUrlBase+'PolicyASPathSet'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deletePolicyASPathSet(self,
Name):
obj = {
'Name' : Name,
}
reqUrl = self.cfgUrlBase+'PolicyASPathSet'
if self.authenticate == True:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deletePolicyASPathSetById(self, objectId ):
reqUrl = self.cfgUrlBase+'PolicyASPathSet'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
else:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
return r
def updatePolicyASPathSet(self,
Name,
ASPathList = None):
obj = {}
if Name != None :
obj['Name'] = Name
if ASPathList != None :
obj['ASPathList'] = ASPathList
reqUrl = self.cfgUrlBase+'PolicyASPathSet'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updatePolicyASPathSetById(self,
objectId,
ASPathList = None):
obj = {}
if ASPathList != None:
obj['ASPathList'] = ASPathList
reqUrl = self.cfgUrlBase+'PolicyASPathSet'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdatePolicyASPathSet(self,
Name,
op,
path,
value,):
obj = {}
obj['Name'] = Name
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'PolicyASPathSet'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getPolicyASPathSet(self,
Name):
obj = {
'Name' : Name,
}
reqUrl = self.cfgUrlBase + 'PolicyASPathSet'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getPolicyASPathSetById(self, objectId ):
reqUrl = self.cfgUrlBase + 'PolicyASPathSet'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllPolicyASPathSets(self):
return self.getObjects('PolicyASPathSet', self.cfgUrlBase)
def getFanState(self,
FanId):
obj = {
'FanId' : int(FanId),
}
reqUrl = self.stateUrlBase + 'Fan'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getFanStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'Fan'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllFanStates(self):
return self.getObjects('Fan', self.stateUrlBase)
"""
.. automethod :: executeFaultClear(self,
:param string OwnerName : Fault owner name Fault owner name
:param string EventName : Fault event name Fault event name
:param string SrcObjUUID : Source object Key UUID Source object Key UUID
"""
def executeFaultClear(self,
OwnerName,
EventName,
SrcObjUUID):
obj = {
'OwnerName' : OwnerName,
'EventName' : EventName,
'SrcObjUUID' : SrcObjUUID,
}
reqUrl = self.actionUrlBase+'FaultClear'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers)
return r
"""
.. automethod :: createOspfv2Area(self,
:param string AreaId : A 32-bit integer uniquely identifying an area. Area ID 0.0.0.0 is used for the OSPF backbone. A 32-bit integer uniquely identifying an area. Area ID 0.0.0.0 is used for the OSPF backbone.
:param string AuthType : The authentication type specified for an area. The authentication type specified for an area.
:param string AdminState : Indicates if OSPF is enabled on this area Indicates if OSPF is enabled on this area
:param bool ImportASExtern : ExternalRoutingCapability if false AS External LSA will not be flooded into this area ExternalRoutingCapability if false AS External LSA will not be flooded into this area
"""
def createOspfv2Area(self,
AreaId,
AuthType='None',
AdminState='DOWN',
ImportASExtern=True):
obj = {
'AreaId' : AreaId,
'AuthType' : AuthType,
'AdminState' : AdminState,
'ImportASExtern' : True if ImportASExtern else False,
}
reqUrl = self.cfgUrlBase+'Ospfv2Area'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteOspfv2Area(self,
AreaId):
obj = {
'AreaId' : AreaId,
}
reqUrl = self.cfgUrlBase+'Ospfv2Area'
if self.authenticate == True:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteOspfv2AreaById(self, objectId ):
reqUrl = self.cfgUrlBase+'Ospfv2Area'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
else:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
return r
def updateOspfv2Area(self,
AreaId,
AuthType = None,
AdminState = None,
ImportASExtern = None):
obj = {}
if AreaId != None :
obj['AreaId'] = AreaId
if AuthType != None :
obj['AuthType'] = AuthType
if AdminState != None :
obj['AdminState'] = AdminState
if ImportASExtern != None :
obj['ImportASExtern'] = True if ImportASExtern else False
reqUrl = self.cfgUrlBase+'Ospfv2Area'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateOspfv2AreaById(self,
objectId,
AuthType = None,
AdminState = None,
ImportASExtern = None):
obj = {}
if AuthType != None:
obj['AuthType'] = AuthType
if AdminState != None:
obj['AdminState'] = AdminState
if ImportASExtern != None:
obj['ImportASExtern'] = ImportASExtern
reqUrl = self.cfgUrlBase+'Ospfv2Area'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdateOspfv2Area(self,
AreaId,
op,
path,
value,):
obj = {}
obj['AreaId'] = AreaId
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'Ospfv2Area'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getOspfv2Area(self,
AreaId):
obj = {
'AreaId' : AreaId,
}
reqUrl = self.cfgUrlBase + 'Ospfv2Area'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getOspfv2AreaById(self, objectId ):
reqUrl = self.cfgUrlBase + 'Ospfv2Area'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllOspfv2Areas(self):
return self.getObjects('Ospfv2Area', self.cfgUrlBase)
def getBGPGlobalState(self,
Vrf):
obj = {
'Vrf' : Vrf,
}
reqUrl = self.stateUrlBase + 'BGPGlobal'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getBGPGlobalStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'BGPGlobal'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllBGPGlobalStates(self):
return self.getObjects('BGPGlobal', self.stateUrlBase)
def getBfdSessionState(self,
IpAddr):
obj = {
'IpAddr' : IpAddr,
}
reqUrl = self.stateUrlBase + 'BfdSession'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getBfdSessionStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'BfdSession'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllBfdSessionStates(self):
return self.getObjects('BfdSession', self.stateUrlBase)
def getOspfv2NbrState(self,
IpAddr,
AddressLessIfIdx):
obj = {
'IpAddr' : IpAddr,
'AddressLessIfIdx' : int(AddressLessIfIdx),
}
reqUrl = self.stateUrlBase + 'Ospfv2Nbr'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getOspfv2NbrStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'Ospfv2Nbr'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllOspfv2NbrStates(self):
return self.getObjects('Ospfv2Nbr', self.stateUrlBase)
def getOspfEventState(self,
Index):
obj = {
'Index' : int(Index),
}
reqUrl = self.stateUrlBase + 'OspfEvent'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getOspfEventStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'OspfEvent'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllOspfEventStates(self):
return self.getObjects('OspfEvent', self.stateUrlBase)
"""
.. automethod :: createLLDPIntf(self,
:param string IntfRef : IfIndex where lldp needs is enabled/disabled IfIndex where lldp needs is enabled/disabled
:param bool Enable : Enable/Disable lldp config Per Port Enable/Disable lldp config Per Port
:param string TxRxMode : Transmit/Receive mode configruration for the LLDP agent specific to an interface Transmit/Receive mode configruration for the LLDP agent specific to an interface
"""
def createLLDPIntf(self,
Enable=True,
TxRxMode='TxRx'):
obj = {
'IntfRef' : 'None',
'Enable' : True if Enable else False,
'TxRxMode' : TxRxMode,
}
reqUrl = self.cfgUrlBase+'LLDPIntf'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteLLDPIntf(self,
IntfRef):
obj = {
'IntfRef' : IntfRef,
}
reqUrl = self.cfgUrlBase+'LLDPIntf'
if self.authenticate == True:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteLLDPIntfById(self, objectId ):
reqUrl = self.cfgUrlBase+'LLDPIntf'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
else:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
return r
def updateLLDPIntf(self,
IntfRef,
Enable = None,
TxRxMode = None):
obj = {}
if IntfRef != None :
obj['IntfRef'] = IntfRef
if Enable != None :
obj['Enable'] = True if Enable else False
if TxRxMode != None :
obj['TxRxMode'] = TxRxMode
reqUrl = self.cfgUrlBase+'LLDPIntf'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateLLDPIntfById(self,
objectId,
Enable = None,
TxRxMode = None):
obj = {}
if Enable != None:
obj['Enable'] = Enable
if TxRxMode != None:
obj['TxRxMode'] = TxRxMode
reqUrl = self.cfgUrlBase+'LLDPIntf'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdateLLDPIntf(self,
IntfRef,
op,
path,
value,):
obj = {}
obj['IntfRef'] = IntfRef
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'LLDPIntf'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getLLDPIntf(self,
IntfRef):
obj = {
'IntfRef' : IntfRef,
}
reqUrl = self.cfgUrlBase + 'LLDPIntf'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getLLDPIntfById(self, objectId ):
reqUrl = self.cfgUrlBase + 'LLDPIntf'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllLLDPIntfs(self):
return self.getObjects('LLDPIntf', self.cfgUrlBase)
def getBufferGlobalStatState(self,
DeviceId):
obj = {
'DeviceId' : int(DeviceId),
}
reqUrl = self.stateUrlBase + 'BufferGlobalStat'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getBufferGlobalStatStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'BufferGlobalStat'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllBufferGlobalStatStates(self):
return self.getObjects('BufferGlobalStat', self.stateUrlBase)
def getIPv6IntfState(self,
IntfRef):
obj = {
'IntfRef' : IntfRef,
}
reqUrl = self.stateUrlBase + 'IPv6Intf'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getIPv6IntfStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'IPv6Intf'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllIPv6IntfStates(self):
return self.getObjects('IPv6Intf', self.stateUrlBase)
"""
.. automethod :: createIPv4Intf(self,
:param string IntfRef : Interface name or ifindex of port/lag or vlan on which this IPv4 object is configured Interface name or ifindex of port/lag or vlan on which this IPv4 object is configured
:param string IpAddr : Interface IP/Net mask in CIDR format to provision on switch interface Interface IP/Net mask in CIDR format to provision on switch interface
:param string AdminState : Administrative state of this IP interface Administrative state of this IP interface
"""
def createIPv4Intf(self,
IntfRef,
IpAddr,
AdminState='UP'):
obj = {
'IntfRef' : IntfRef,
'IpAddr' : IpAddr,
'AdminState' : AdminState,
}
reqUrl = self.cfgUrlBase+'IPv4Intf'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteIPv4Intf(self,
IntfRef):
obj = {
'IntfRef' : IntfRef,
}
reqUrl = self.cfgUrlBase+'IPv4Intf'
if self.authenticate == True:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteIPv4IntfById(self, objectId ):
reqUrl = self.cfgUrlBase+'IPv4Intf'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
else:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
return r
def updateIPv4Intf(self,
IntfRef,
IpAddr = None,
AdminState = None):
obj = {}
if IntfRef != None :
obj['IntfRef'] = IntfRef
if IpAddr != None :
obj['IpAddr'] = IpAddr
if AdminState != None :
obj['AdminState'] = AdminState
reqUrl = self.cfgUrlBase+'IPv4Intf'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateIPv4IntfById(self,
objectId,
IpAddr = None,
AdminState = None):
obj = {}
if IpAddr != None:
obj['IpAddr'] = IpAddr
if AdminState != None:
obj['AdminState'] = AdminState
reqUrl = self.cfgUrlBase+'IPv4Intf'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdateIPv4Intf(self,
IntfRef,
op,
path,
value,):
obj = {}
obj['IntfRef'] = IntfRef
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'IPv4Intf'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getIPv4Intf(self,
IntfRef):
obj = {
'IntfRef' : IntfRef,
}
reqUrl = self.cfgUrlBase + 'IPv4Intf'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getIPv4IntfById(self, objectId ):
reqUrl = self.cfgUrlBase + 'IPv4Intf'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllIPv4Intfs(self):
return self.getObjects('IPv4Intf', self.cfgUrlBase)
def getPolicyStmtState(self,
Name):
obj = {
'Name' : Name,
}
reqUrl = self.stateUrlBase + 'PolicyStmt'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getPolicyStmtStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'PolicyStmt'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllPolicyStmtStates(self):
return self.getObjects('PolicyStmt', self.stateUrlBase)
def getPowerConverterSensorPMDataState(self,
Class,
Name):
obj = {
'Class' : Class,
'Name' : Name,
}
reqUrl = self.stateUrlBase + 'PowerConverterSensorPMData'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getPowerConverterSensorPMDataStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'PowerConverterSensorPMData'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllPowerConverterSensorPMDataStates(self):
return self.getObjects('PowerConverterSensorPMData', self.stateUrlBase)
"""
.. automethod :: createIPv6Route(self,
:param string DestinationNw : IP address of the route IP address of the route
:param string NetworkMask : mask of the route mask of the route
:param NextHopInfo NextHop :
:param string Protocol : Protocol type of the route Protocol type of the route
:param bool NullRoute : Specify if this is a null route Specify if this is a null route
:param uint32 Cost : Cost of this route Cost of this route
"""
def createIPv6Route(self,
DestinationNw,
NetworkMask,
NextHop,
Protocol='STATIC',
NullRoute=False,
Cost=0):
obj = {
'DestinationNw' : DestinationNw,
'NetworkMask' : NetworkMask,
'NextHop' : NextHop,
'Protocol' : Protocol,
'NullRoute' : True if NullRoute else False,
'Cost' : int(Cost),
}
reqUrl = self.cfgUrlBase+'IPv6Route'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteIPv6Route(self,
DestinationNw,
NetworkMask):
obj = {
'DestinationNw' : DestinationNw,
'NetworkMask' : NetworkMask,
}
reqUrl = self.cfgUrlBase+'IPv6Route'
if self.authenticate == True:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteIPv6RouteById(self, objectId ):
reqUrl = self.cfgUrlBase+'IPv6Route'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
else:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
return r
def updateIPv6Route(self,
DestinationNw,
NetworkMask,
NextHop = None,
Protocol = None,
NullRoute = None,
Cost = None):
obj = {}
if DestinationNw != None :
obj['DestinationNw'] = DestinationNw
if NetworkMask != None :
obj['NetworkMask'] = NetworkMask
if NextHop != None :
obj['NextHop'] = NextHop
if Protocol != None :
obj['Protocol'] = Protocol
if NullRoute != None :
obj['NullRoute'] = True if NullRoute else False
if Cost != None :
obj['Cost'] = int(Cost)
reqUrl = self.cfgUrlBase+'IPv6Route'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateIPv6RouteById(self,
                        objectId,
                        NextHop = None,
                        Protocol = None,
                        NullRoute = None,
                        Cost = None):
    """PATCH the IPv6Route addressed by objectId; only non-None fields are sent.

    NOTE(review): unlike updateIPv6Route, NullRoute and Cost are sent
    verbatim (no bool/int coercion) — presumably intentional in this
    generated API; confirm before relying on it.
    """
    candidates = (('NextHop', NextHop),
                  ('Protocol', Protocol),
                  ('NullRoute', NullRoute),
                  ('Cost', Cost))
    obj = dict((k, v) for k, v in candidates if v != None)
    url = self.cfgUrlBase + 'IPv6Route' + "/%s" % (objectId,)
    if self.authenticate == True:
        return requests.patch(url, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, data=json.dumps(obj), headers=headers, timeout=self.timeout)
def patchUpdateIPv6Route(self,
                         DestinationNw,
                         NetworkMask,
                         op,
                         path,
                         value,):
    """Send a single JSON-patch operation against the IPv6Route keyed by
    DestinationNw/NetworkMask (uses the patch-specific headers)."""
    body = json.dumps({
        'DestinationNw': DestinationNw,
        'NetworkMask': NetworkMask,
        'patch': [{'op': op, 'path': path, 'value': value}],
    })
    url = self.cfgUrlBase + 'IPv6Route'
    if self.authenticate == True:
        return requests.patch(url, data=body, headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, data=body, headers=patchheaders, timeout=self.timeout)
def getIPv6Route(self,
                 DestinationNw,
                 NetworkMask):
    """Fetch the IPv6Route config object keyed by DestinationNw/NetworkMask."""
    body = json.dumps({
        'DestinationNw': DestinationNw,
        'NetworkMask': NetworkMask,
    })
    url = self.cfgUrlBase + 'IPv6Route'
    if self.authenticate == True:
        return requests.get(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=body, headers=headers, timeout=self.timeout)
def getIPv6RouteById(self, objectId):
    """Fetch a single IPv6Route config object by its server-assigned id."""
    url = self.cfgUrlBase + 'IPv6Route' + "/%s" % (objectId,)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllIPv6Routes(self):
    """Return every IPv6Route config object via the generic bulk getter."""
    baseUrl = self.cfgUrlBase
    return self.getObjects('IPv6Route', baseUrl)
"""
.. automethod :: createTemperatureSensor(self,
:param string Name : Temperature Sensor Name Temperature Sensor Name
:param float64 HigherAlarmThreshold : Higher Alarm Threshold for TCA Higher Alarm Threshold for TCA
:param float64 HigherWarningThreshold : Higher Warning Threshold for TCA Higher Warning Threshold for TCA
:param float64 LowerWarningThreshold : Lower Warning Threshold for TCA Lower Warning Threshold for TCA
:param float64 LowerAlarmThreshold : Lower Alarm Threshold for TCA Lower Alarm Threshold for TCA
:param string PMClassCAdminState : PM Class-C Admin State PM Class-C Admin State
:param string PMClassAAdminState : PM Class-A Admin State PM Class-A Admin State
:param string AdminState : Enable/Disable Enable/Disable
:param string PMClassBAdminState : PM Class-B Admin State PM Class-B Admin State
"""
def createTemperatureSensor(self,
                            Name,
                            HigherAlarmThreshold,
                            HigherWarningThreshold,
                            LowerWarningThreshold,
                            LowerAlarmThreshold,
                            PMClassCAdminState='Enable',
                            PMClassAAdminState='Enable',
                            AdminState='Enable',
                            PMClassBAdminState='Enable'):
    """POST a new TemperatureSensor config object.

    Thresholds are forwarded as-is (the API declares them float64);
    admin states default to 'Enable'.
    """
    body = json.dumps({
        'Name': Name,
        'HigherAlarmThreshold': HigherAlarmThreshold,
        'HigherWarningThreshold': HigherWarningThreshold,
        'LowerWarningThreshold': LowerWarningThreshold,
        'LowerAlarmThreshold': LowerAlarmThreshold,
        'PMClassCAdminState': PMClassCAdminState,
        'PMClassAAdminState': PMClassAAdminState,
        'AdminState': AdminState,
        'PMClassBAdminState': PMClassBAdminState,
    })
    url = self.cfgUrlBase + 'TemperatureSensor'
    if self.authenticate == True:
        return requests.post(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.post(url, data=body, headers=headers, timeout=self.timeout)
def deleteTemperatureSensor(self,
                            Name):
    """Delete the TemperatureSensor config object keyed by Name."""
    body = json.dumps({
        'Name': Name,
    })
    url = self.cfgUrlBase + 'TemperatureSensor'
    if self.authenticate == True:
        return requests.delete(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.delete(url, data=body, headers=headers, timeout=self.timeout)
def deleteTemperatureSensorById(self, objectId):
    """Delete the TemperatureSensor object with the given server-assigned id.

    Fix: the authenticated branch was byte-identical to the unauthenticated
    one, so credentials were never sent. It now passes basic auth and
    disables TLS verification, matching the other authenticated requests
    in this client (e.g. getTemperatureSensorById).

    :param objectId: server-assigned object id, appended to the URL.
    :returns: the ``requests.Response`` from the DELETE call.
    """
    reqUrl = self.cfgUrlBase + 'TemperatureSensor' + "/%s" % (objectId)
    if self.authenticate == True:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updateTemperatureSensor(self,
                            Name,
                            HigherAlarmThreshold = None,
                            HigherWarningThreshold = None,
                            LowerWarningThreshold = None,
                            LowerAlarmThreshold = None,
                            PMClassCAdminState = None,
                            PMClassAAdminState = None,
                            AdminState = None,
                            PMClassBAdminState = None):
    """PATCH the TemperatureSensor keyed by Name; only non-None fields are sent."""
    fields = (('Name', Name),
              ('HigherAlarmThreshold', HigherAlarmThreshold),
              ('HigherWarningThreshold', HigherWarningThreshold),
              ('LowerWarningThreshold', LowerWarningThreshold),
              ('LowerAlarmThreshold', LowerAlarmThreshold),
              ('PMClassCAdminState', PMClassCAdminState),
              ('PMClassAAdminState', PMClassAAdminState),
              ('AdminState', AdminState),
              ('PMClassBAdminState', PMClassBAdminState))
    obj = dict((k, v) for k, v in fields if v != None)
    url = self.cfgUrlBase + 'TemperatureSensor'
    if self.authenticate == True:
        return requests.patch(url, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, data=json.dumps(obj), headers=headers, timeout=self.timeout)
def updateTemperatureSensorById(self,
                                objectId,
                                HigherAlarmThreshold = None,
                                HigherWarningThreshold = None,
                                LowerWarningThreshold = None,
                                LowerAlarmThreshold = None,
                                PMClassCAdminState = None,
                                PMClassAAdminState = None,
                                AdminState = None,
                                PMClassBAdminState = None):
    """PATCH the TemperatureSensor addressed by objectId; only non-None fields are sent."""
    fields = (('HigherAlarmThreshold', HigherAlarmThreshold),
              ('HigherWarningThreshold', HigherWarningThreshold),
              ('LowerWarningThreshold', LowerWarningThreshold),
              ('LowerAlarmThreshold', LowerAlarmThreshold),
              ('PMClassCAdminState', PMClassCAdminState),
              ('PMClassAAdminState', PMClassAAdminState),
              ('AdminState', AdminState),
              ('PMClassBAdminState', PMClassBAdminState))
    obj = dict((k, v) for k, v in fields if v != None)
    url = self.cfgUrlBase + 'TemperatureSensor' + "/%s" % (objectId,)
    if self.authenticate == True:
        return requests.patch(url, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, data=json.dumps(obj), headers=headers, timeout=self.timeout)
def patchUpdateTemperatureSensor(self,
                                 Name,
                                 op,
                                 path,
                                 value,):
    """Send a single JSON-patch operation against the TemperatureSensor keyed by Name."""
    body = json.dumps({
        'Name': Name,
        'patch': [{'op': op, 'path': path, 'value': value}],
    })
    url = self.cfgUrlBase + 'TemperatureSensor'
    if self.authenticate == True:
        return requests.patch(url, data=body, headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, data=body, headers=patchheaders, timeout=self.timeout)
def getTemperatureSensor(self,
                         Name):
    """Fetch the TemperatureSensor config object keyed by Name."""
    body = json.dumps({
        'Name': Name,
    })
    url = self.cfgUrlBase + 'TemperatureSensor'
    if self.authenticate == True:
        return requests.get(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=body, headers=headers, timeout=self.timeout)
def getTemperatureSensorById(self, objectId):
    """Fetch a single TemperatureSensor config object by its server-assigned id."""
    url = self.cfgUrlBase + 'TemperatureSensor' + "/%s" % (objectId,)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllTemperatureSensors(self):
    """Return every TemperatureSensor config object via the generic bulk getter."""
    baseUrl = self.cfgUrlBase
    return self.getObjects('TemperatureSensor', baseUrl)
def getTemperatureSensorState(self,
                              Name):
    """Fetch the operational state of the TemperatureSensor keyed by Name."""
    body = json.dumps({
        'Name': Name,
    })
    url = self.stateUrlBase + 'TemperatureSensor'
    if self.authenticate == True:
        return requests.get(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=body, headers=headers, timeout=self.timeout)
def getTemperatureSensorStateById(self, objectId):
    """Fetch a single TemperatureSensor state object by its server-assigned id."""
    url = self.stateUrlBase + 'TemperatureSensor' + "/%s" % (objectId,)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllTemperatureSensorStates(self):
    """Return every TemperatureSensor state object via the generic bulk getter."""
    baseUrl = self.stateUrlBase
    return self.getObjects('TemperatureSensor', baseUrl)
def getRouteStatsPerInterfaceState(self,
                                   Intfref):
    """Fetch the RouteStatsPerInterface state object keyed by Intfref."""
    body = json.dumps({
        'Intfref': Intfref,
    })
    url = self.stateUrlBase + 'RouteStatsPerInterface'
    if self.authenticate == True:
        return requests.get(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=body, headers=headers, timeout=self.timeout)
def getRouteStatsPerInterfaceStateById(self, objectId):
    """Fetch a single RouteStatsPerInterface state object by its server-assigned id."""
    url = self.stateUrlBase + 'RouteStatsPerInterface' + "/%s" % (objectId,)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllRouteStatsPerInterfaceStates(self):
    """Return every RouteStatsPerInterface state object via the generic bulk getter."""
    baseUrl = self.stateUrlBase
    return self.getObjects('RouteStatsPerInterface', baseUrl)
def getNDPGlobalState(self,
                      Vrf):
    """Fetch the NDPGlobal state object keyed by Vrf."""
    body = json.dumps({
        'Vrf': Vrf,
    })
    url = self.stateUrlBase + 'NDPGlobal'
    if self.authenticate == True:
        return requests.get(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=body, headers=headers, timeout=self.timeout)
def getNDPGlobalStateById(self, objectId):
    """Fetch a single NDPGlobal state object by its server-assigned id."""
    url = self.stateUrlBase + 'NDPGlobal' + "/%s" % (objectId,)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllNDPGlobalStates(self):
    """Return every NDPGlobal state object via the generic bulk getter."""
    baseUrl = self.stateUrlBase
    return self.getObjects('NDPGlobal', baseUrl)
def getLacpGlobalState(self,
                       Vrf):
    """Fetch the LacpGlobal state object keyed by Vrf."""
    body = json.dumps({
        'Vrf': Vrf,
    })
    url = self.stateUrlBase + 'LacpGlobal'
    if self.authenticate == True:
        return requests.get(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=body, headers=headers, timeout=self.timeout)
def getLacpGlobalStateById(self, objectId):
    """Fetch a single LacpGlobal state object by its server-assigned id."""
    url = self.stateUrlBase + 'LacpGlobal' + "/%s" % (objectId,)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllLacpGlobalStates(self):
    """Return every LacpGlobal state object via the generic bulk getter."""
    baseUrl = self.stateUrlBase
    return self.getObjects('LacpGlobal', baseUrl)
def getDHCPRelayIntfState(self,
                          IntfRef):
    """Fetch the DHCPRelayIntf state object keyed by IntfRef."""
    body = json.dumps({
        'IntfRef': IntfRef,
    })
    url = self.stateUrlBase + 'DHCPRelayIntf'
    if self.authenticate == True:
        return requests.get(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=body, headers=headers, timeout=self.timeout)
def getDHCPRelayIntfStateById(self, objectId):
    """Fetch a single DHCPRelayIntf state object by its server-assigned id."""
    url = self.stateUrlBase + 'DHCPRelayIntf' + "/%s" % (objectId,)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllDHCPRelayIntfStates(self):
    """Return every DHCPRelayIntf state object via the generic bulk getter."""
    baseUrl = self.stateUrlBase
    return self.getObjects('DHCPRelayIntf', baseUrl)
def getVoltageSensorPMDataState(self,
                                Class,
                                Name):
    """Fetch the VoltageSensorPMData state object keyed by Class and Name."""
    body = json.dumps({
        'Class': Class,
        'Name': Name,
    })
    url = self.stateUrlBase + 'VoltageSensorPMData'
    if self.authenticate == True:
        return requests.get(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=body, headers=headers, timeout=self.timeout)
def getVoltageSensorPMDataStateById(self, objectId):
    """Fetch a single VoltageSensorPMData state object by its server-assigned id."""
    url = self.stateUrlBase + 'VoltageSensorPMData' + "/%s" % (objectId,)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllVoltageSensorPMDataStates(self):
    """Return every VoltageSensorPMData state object via the generic bulk getter."""
    baseUrl = self.stateUrlBase
    return self.getObjects('VoltageSensorPMData', baseUrl)
def getIPV6AdjState(self,
                    IntfRef):
    """Fetch the IPV6Adj (IPv6 adjacency) state object keyed by IntfRef."""
    body = json.dumps({
        'IntfRef': IntfRef,
    })
    url = self.stateUrlBase + 'IPV6Adj'
    if self.authenticate == True:
        return requests.get(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=body, headers=headers, timeout=self.timeout)
def getIPV6AdjStateById(self, objectId):
    """Fetch a single IPV6Adj state object by its server-assigned id."""
    url = self.stateUrlBase + 'IPV6Adj' + "/%s" % (objectId,)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllIPV6AdjStates(self):
    """Return every IPV6Adj state object via the generic bulk getter."""
    baseUrl = self.stateUrlBase
    return self.getObjects('IPV6Adj', baseUrl)
"""
.. automethod :: createDhcpIntfConfig(self,
:param string IntfRef : Interface name or ifindex of L3 interface object on which Dhcp Server need to be configured Interface name or ifindex of L3 interface object on which Dhcp Server need to be configured
:param string Subnet : Subnet Subnet
:param string SubnetMask : Subnet Mask Subnet Mask
:param string IPAddrRange : Range of IP Addresses DEFAULT Range of IP Addresses DEFAULT
:param string BroadcastAddr : Broadcast Address DEFAULT Broadcast Address DEFAULT
:param string RouterAddr : Router Address DEFAULT Router Address DEFAULT
:param string DNSServerAddr : Comma seperated List of DNS Server Address DEFAULT Comma seperated List of DNS Server Address DEFAULT
:param string DomainName : Domain Name Address DEFAULT Domain Name Address DEFAULT
:param bool Enable : Enable and Disable Control DEFAULT Enable and Disable Control DEFAULT
"""
def createDhcpIntfConfig(self,
                         IntfRef,
                         Subnet,
                         SubnetMask,
                         IPAddrRange,
                         BroadcastAddr,
                         RouterAddr,
                         DNSServerAddr,
                         DomainName,
                         Enable):
    """POST a new DhcpIntfConfig object for the given L3 interface.

    Enable is normalized to a strict bool before serialization.
    """
    body = json.dumps({
        'IntfRef': IntfRef,
        'Subnet': Subnet,
        'SubnetMask': SubnetMask,
        'IPAddrRange': IPAddrRange,
        'BroadcastAddr': BroadcastAddr,
        'RouterAddr': RouterAddr,
        'DNSServerAddr': DNSServerAddr,
        'DomainName': DomainName,
        'Enable': bool(Enable),
    })
    url = self.cfgUrlBase + 'DhcpIntfConfig'
    if self.authenticate == True:
        return requests.post(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.post(url, data=body, headers=headers, timeout=self.timeout)
def deleteDhcpIntfConfig(self,
                         IntfRef):
    """Delete the DhcpIntfConfig object keyed by IntfRef."""
    body = json.dumps({
        'IntfRef': IntfRef,
    })
    url = self.cfgUrlBase + 'DhcpIntfConfig'
    if self.authenticate == True:
        return requests.delete(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.delete(url, data=body, headers=headers, timeout=self.timeout)
def deleteDhcpIntfConfigById(self, objectId):
    """Delete the DhcpIntfConfig object with the given server-assigned id.

    Fix: the authenticated branch was byte-identical to the unauthenticated
    one, so credentials were never sent. It now passes basic auth and
    disables TLS verification, matching the other authenticated requests
    in this client (e.g. getDhcpIntfConfigById).

    :param objectId: server-assigned object id, appended to the URL.
    :returns: the ``requests.Response`` from the DELETE call.
    """
    reqUrl = self.cfgUrlBase + 'DhcpIntfConfig' + "/%s" % (objectId)
    if self.authenticate == True:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updateDhcpIntfConfig(self,
                         IntfRef,
                         Subnet = None,
                         SubnetMask = None,
                         IPAddrRange = None,
                         BroadcastAddr = None,
                         RouterAddr = None,
                         DNSServerAddr = None,
                         DomainName = None,
                         Enable = None):
    """PATCH the DhcpIntfConfig keyed by IntfRef.

    Only non-None fields are sent; Enable is normalized to a strict bool.
    """
    obj = {}
    for key, val in (('IntfRef', IntfRef),
                     ('Subnet', Subnet),
                     ('SubnetMask', SubnetMask),
                     ('IPAddrRange', IPAddrRange),
                     ('BroadcastAddr', BroadcastAddr),
                     ('RouterAddr', RouterAddr),
                     ('DNSServerAddr', DNSServerAddr),
                     ('DomainName', DomainName)):
        if val != None:
            obj[key] = val
    if Enable != None:
        obj['Enable'] = bool(Enable)
    url = self.cfgUrlBase + 'DhcpIntfConfig'
    if self.authenticate == True:
        return requests.patch(url, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, data=json.dumps(obj), headers=headers, timeout=self.timeout)
def updateDhcpIntfConfigById(self,
                             objectId,
                             Subnet = None,
                             SubnetMask = None,
                             IPAddrRange = None,
                             BroadcastAddr = None,
                             RouterAddr = None,
                             DNSServerAddr = None,
                             DomainName = None,
                             Enable = None):
    """PATCH the DhcpIntfConfig addressed by objectId; only non-None fields are sent.

    NOTE(review): unlike updateDhcpIntfConfig, Enable is sent verbatim
    (no bool coercion) — presumably intentional in this generated API.
    """
    fields = (('Subnet', Subnet),
              ('SubnetMask', SubnetMask),
              ('IPAddrRange', IPAddrRange),
              ('BroadcastAddr', BroadcastAddr),
              ('RouterAddr', RouterAddr),
              ('DNSServerAddr', DNSServerAddr),
              ('DomainName', DomainName),
              ('Enable', Enable))
    obj = dict((k, v) for k, v in fields if v != None)
    url = self.cfgUrlBase + 'DhcpIntfConfig' + "/%s" % (objectId,)
    if self.authenticate == True:
        return requests.patch(url, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, data=json.dumps(obj), headers=headers, timeout=self.timeout)
def patchUpdateDhcpIntfConfig(self,
                              IntfRef,
                              op,
                              path,
                              value,):
    """Send a single JSON-patch operation against the DhcpIntfConfig keyed by IntfRef."""
    body = json.dumps({
        'IntfRef': IntfRef,
        'patch': [{'op': op, 'path': path, 'value': value}],
    })
    url = self.cfgUrlBase + 'DhcpIntfConfig'
    if self.authenticate == True:
        return requests.patch(url, data=body, headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, data=body, headers=patchheaders, timeout=self.timeout)
def getDhcpIntfConfig(self,
                      IntfRef):
    """Fetch the DhcpIntfConfig object keyed by IntfRef."""
    body = json.dumps({
        'IntfRef': IntfRef,
    })
    url = self.cfgUrlBase + 'DhcpIntfConfig'
    if self.authenticate == True:
        return requests.get(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=body, headers=headers, timeout=self.timeout)
def getDhcpIntfConfigById(self, objectId):
    """Fetch a single DhcpIntfConfig object by its server-assigned id."""
    url = self.cfgUrlBase + 'DhcpIntfConfig' + "/%s" % (objectId,)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllDhcpIntfConfigs(self):
    """Return every DhcpIntfConfig object via the generic bulk getter."""
    baseUrl = self.cfgUrlBase
    return self.getObjects('DhcpIntfConfig', baseUrl)
"""
.. automethod :: createVrrpV6Intf(self,
:param string IntfRef : Interface (name) for which VRRP Version 3 aka VRRP with ipv6 Config needs to be done Interface (name) for which VRRP Version 3 aka VRRP with ipv6 Config needs to be done
:param int32 VRID : Virtual Router's Unique Identifier Virtual Router's Unique Identifier
:param string Address : Virtual Router IPv6 Address Virtual Router IPv6 Address
:param bool PreemptMode : Controls whether a (starting or restarting) higher-priority Backup router preempts a lower-priority Master router Controls whether a (starting or restarting) higher-priority Backup router preempts a lower-priority Master router
:param int32 Priority : Sending VRRP router's priority for the virtual router Sending VRRP router's priority for the virtual router
:param int32 AdvertisementInterval : Time interval between ADVERTISEMENTS Time interval between ADVERTISEMENTS
:param string AdminState : Vrrp State up or down Vrrp State up or down
:param bool AcceptMode : Controls whether a virtual router in Master state will accept packets addressed to the address owner's IPv6 address as its own if it is not the IPv6 address owner. Controls whether a virtual router in Master state will accept packets addressed to the address owner's IPv6 address as its own if it is not the IPv6 address owner.
"""
def createVrrpV6Intf(self,
                     IntfRef,
                     VRID,
                     Address,
                     PreemptMode=True,
                     Priority=100,
                     AdvertisementInterval=1,
                     AdminState='DOWN',
                     AcceptMode=False):
    """POST a new VrrpV6Intf (VRRPv3 over IPv6) config object.

    Integer fields (VRID, Priority, AdvertisementInterval) are coerced
    with int(); PreemptMode and AcceptMode are normalized to strict bools.
    """
    body = json.dumps({
        'IntfRef': IntfRef,
        'VRID': int(VRID),
        'Address': Address,
        'PreemptMode': bool(PreemptMode),
        'Priority': int(Priority),
        'AdvertisementInterval': int(AdvertisementInterval),
        'AdminState': AdminState,
        'AcceptMode': bool(AcceptMode),
    })
    url = self.cfgUrlBase + 'VrrpV6Intf'
    if self.authenticate == True:
        return requests.post(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.post(url, data=body, headers=headers, timeout=self.timeout)
def deleteVrrpV6Intf(self,
                     IntfRef,
                     VRID):
    """Delete the VrrpV6Intf config object keyed by IntfRef/VRID.

    NOTE(review): VRID is sent verbatim here (no int() coercion), unlike
    create/getVrrpV6Intf — presumably fine, but confirm callers pass an int.
    """
    body = json.dumps({
        'IntfRef': IntfRef,
        'VRID': VRID,
    })
    url = self.cfgUrlBase + 'VrrpV6Intf'
    if self.authenticate == True:
        return requests.delete(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.delete(url, data=body, headers=headers, timeout=self.timeout)
def deleteVrrpV6IntfById(self, objectId):
    """Delete the VrrpV6Intf object with the given server-assigned id.

    Fix: the authenticated branch was byte-identical to the unauthenticated
    one, so credentials were never sent. It now passes basic auth and
    disables TLS verification, matching the other authenticated requests
    in this client (e.g. getVrrpV6IntfById).

    :param objectId: server-assigned object id, appended to the URL.
    :returns: the ``requests.Response`` from the DELETE call.
    """
    reqUrl = self.cfgUrlBase + 'VrrpV6Intf' + "/%s" % (objectId)
    if self.authenticate == True:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updateVrrpV6Intf(self,
                     IntfRef,
                     VRID,
                     Address = None,
                     PreemptMode = None,
                     Priority = None,
                     AdvertisementInterval = None,
                     AdminState = None,
                     AcceptMode = None):
    """PATCH the VrrpV6Intf keyed by IntfRef/VRID.

    Only non-None fields are sent. Integer fields are coerced with int()
    and the two mode flags are normalized to strict bools.
    """
    obj = {}
    if IntfRef != None:
        obj['IntfRef'] = IntfRef
    if VRID != None:
        obj['VRID'] = int(VRID)
    if Address != None:
        obj['Address'] = Address
    if PreemptMode != None:
        obj['PreemptMode'] = bool(PreemptMode)
    if Priority != None:
        obj['Priority'] = int(Priority)
    if AdvertisementInterval != None:
        obj['AdvertisementInterval'] = int(AdvertisementInterval)
    if AdminState != None:
        obj['AdminState'] = AdminState
    if AcceptMode != None:
        obj['AcceptMode'] = bool(AcceptMode)
    url = self.cfgUrlBase + 'VrrpV6Intf'
    if self.authenticate == True:
        return requests.patch(url, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, data=json.dumps(obj), headers=headers, timeout=self.timeout)
def updateVrrpV6IntfById(self,
                         objectId,
                         Address = None,
                         PreemptMode = None,
                         Priority = None,
                         AdvertisementInterval = None,
                         AdminState = None,
                         AcceptMode = None):
    """PATCH the VrrpV6Intf addressed by objectId; only non-None fields are sent.

    NOTE(review): fields are sent verbatim here (no int/bool coercion),
    unlike updateVrrpV6Intf — presumably intentional in this generated API.
    """
    fields = (('Address', Address),
              ('PreemptMode', PreemptMode),
              ('Priority', Priority),
              ('AdvertisementInterval', AdvertisementInterval),
              ('AdminState', AdminState),
              ('AcceptMode', AcceptMode))
    obj = dict((k, v) for k, v in fields if v != None)
    url = self.cfgUrlBase + 'VrrpV6Intf' + "/%s" % (objectId,)
    if self.authenticate == True:
        return requests.patch(url, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, data=json.dumps(obj), headers=headers, timeout=self.timeout)
def patchUpdateVrrpV6Intf(self,
                          IntfRef,
                          VRID,
                          op,
                          path,
                          value,):
    """Send a single JSON-patch operation against the VrrpV6Intf keyed by IntfRef/VRID."""
    body = json.dumps({
        'IntfRef': IntfRef,
        'VRID': VRID,
        'patch': [{'op': op, 'path': path, 'value': value}],
    })
    url = self.cfgUrlBase + 'VrrpV6Intf'
    if self.authenticate == True:
        return requests.patch(url, data=body, headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, data=body, headers=patchheaders, timeout=self.timeout)
def getVrrpV6Intf(self,
                  IntfRef,
                  VRID):
    """Fetch the VrrpV6Intf config object keyed by IntfRef/VRID (VRID coerced to int)."""
    body = json.dumps({
        'IntfRef': IntfRef,
        'VRID': int(VRID),
    })
    url = self.cfgUrlBase + 'VrrpV6Intf'
    if self.authenticate == True:
        return requests.get(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=body, headers=headers, timeout=self.timeout)
def getVrrpV6IntfById(self, objectId):
    """Fetch a single VrrpV6Intf config object by its server-assigned id."""
    url = self.cfgUrlBase + 'VrrpV6Intf' + "/%s" % (objectId,)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllVrrpV6Intfs(self):
    """Return every VrrpV6Intf config object via the generic bulk getter."""
    baseUrl = self.cfgUrlBase
    return self.getObjects('VrrpV6Intf', baseUrl)
def getPlatformState(self,
                     ObjName):
    """Fetch the Platform state object keyed by ObjName."""
    body = json.dumps({
        'ObjName': ObjName,
    })
    url = self.stateUrlBase + 'Platform'
    if self.authenticate == True:
        return requests.get(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=body, headers=headers, timeout=self.timeout)
def getPlatformStateById(self, objectId):
    """Fetch a single Platform state object by its server-assigned id."""
    url = self.stateUrlBase + 'Platform' + "/%s" % (objectId,)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllPlatformStates(self):
    """Return every Platform state object via the generic bulk getter."""
    baseUrl = self.stateUrlBase
    return self.getObjects('Platform', baseUrl)
def getSystemStatusState(self,
                         Name):
    """Fetch the SystemStatus state object keyed by Name."""
    body = json.dumps({
        'Name': Name,
    })
    url = self.stateUrlBase + 'SystemStatus'
    if self.authenticate == True:
        return requests.get(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=body, headers=headers, timeout=self.timeout)
def getSystemStatusStateById(self, objectId):
    """Fetch a single SystemStatus state object by its server-assigned id."""
    url = self.stateUrlBase + 'SystemStatus' + "/%s" % (objectId,)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllSystemStatusStates(self):
    """Return every SystemStatus state object via the generic bulk getter."""
    baseUrl = self.stateUrlBase
    return self.getObjects('SystemStatus', baseUrl)
"""
.. automethod :: executeArpDeleteByIPv4Addr(self,
:param string IpAddr : End Host IP Address for which corresponding Arp entry needed to be deleted End Host IP Address for which corresponding Arp entry needed to be deleted
"""
def executeArpDeleteByIPv4Addr(self,
                               IpAddr):
    """POST the ArpDeleteByIPv4Addr action to flush the ARP entry for IpAddr.

    NOTE(review): unlike the CRUD wrappers, this action call sends no
    timeout= — presumably because actions may run long; confirm that is
    intentional before adding one.
    """
    body = json.dumps({
        'IpAddr': IpAddr,
    })
    url = self.actionUrlBase + 'ArpDeleteByIPv4Addr'
    if self.authenticate == True:
        return requests.post(url, data=body, headers=headers, auth=(self.user, self.passwd), verify=False)
    return requests.post(url, data=body, headers=headers)
"""
.. automethod :: createFanSensor(self,
:param string Name : Fan Sensor Name Fan Sensor Name
:param int32 HigherAlarmThreshold : Higher Alarm Threshold for TCA Higher Alarm Threshold for TCA
:param int32 HigherWarningThreshold : Higher Warning Threshold for TCA Higher Warning Threshold for TCA
:param int32 LowerWarningThreshold : Lower Warning Threshold for TCA Lower Warning Threshold for TCA
:param int32 LowerAlarmThreshold : Lower Alarm Threshold for TCA Lower Alarm Threshold for TCA
:param string PMClassCAdminState : PM Class-C Admin State PM Class-C Admin State
:param string PMClassAAdminState : PM Class-A Admin State PM Class-A Admin State
:param string AdminState : Enable/Disable Enable/Disable
:param string PMClassBAdminState : PM Class-B Admin State PM Class-B Admin State
"""
def createFanSensor(self,
                    Name,
                    HigherAlarmThreshold,
                    HigherWarningThreshold,
                    LowerWarningThreshold,
                    LowerAlarmThreshold,
                    PMClassCAdminState='Enable',
                    PMClassAAdminState='Enable',
                    AdminState='Enable',
                    PMClassBAdminState='Enable'):
    """POST a new FanSensor config object.

    Threshold values are coerced with int() (the API declares them int32);
    admin states default to 'Enable'.
    """
    body = json.dumps({
        'Name': Name,
        'HigherAlarmThreshold': int(HigherAlarmThreshold),
        'HigherWarningThreshold': int(HigherWarningThreshold),
        'LowerWarningThreshold': int(LowerWarningThreshold),
        'LowerAlarmThreshold': int(LowerAlarmThreshold),
        'PMClassCAdminState': PMClassCAdminState,
        'PMClassAAdminState': PMClassAAdminState,
        'AdminState': AdminState,
        'PMClassBAdminState': PMClassBAdminState,
    })
    url = self.cfgUrlBase + 'FanSensor'
    if self.authenticate == True:
        return requests.post(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.post(url, data=body, headers=headers, timeout=self.timeout)
def deleteFanSensor(self,
                    Name):
    """Delete the FanSensor config object keyed by Name."""
    body = json.dumps({
        'Name': Name,
    })
    url = self.cfgUrlBase + 'FanSensor'
    if self.authenticate == True:
        return requests.delete(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.delete(url, data=body, headers=headers, timeout=self.timeout)
def deleteFanSensorById(self, objectId):
    """Delete the FanSensor object with the given server-assigned id.

    Fix: the authenticated branch was byte-identical to the unauthenticated
    one, so credentials were never sent. It now passes basic auth and
    disables TLS verification, matching the other authenticated requests
    in this client (e.g. getFanSensorById).

    :param objectId: server-assigned object id, appended to the URL.
    :returns: the ``requests.Response`` from the DELETE call.
    """
    reqUrl = self.cfgUrlBase + 'FanSensor' + "/%s" % (objectId)
    if self.authenticate == True:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updateFanSensor(self,
                    Name,
                    HigherAlarmThreshold = None,
                    HigherWarningThreshold = None,
                    LowerWarningThreshold = None,
                    LowerAlarmThreshold = None,
                    PMClassCAdminState = None,
                    PMClassAAdminState = None,
                    AdminState = None,
                    PMClassBAdminState = None):
    """PATCH the FanSensor keyed by Name.

    Only non-None fields are sent; threshold values are coerced with int().
    """
    obj = {}
    if Name != None:
        obj['Name'] = Name
    for key, val in (('HigherAlarmThreshold', HigherAlarmThreshold),
                     ('HigherWarningThreshold', HigherWarningThreshold),
                     ('LowerWarningThreshold', LowerWarningThreshold),
                     ('LowerAlarmThreshold', LowerAlarmThreshold)):
        if val != None:
            obj[key] = int(val)
    for key, val in (('PMClassCAdminState', PMClassCAdminState),
                     ('PMClassAAdminState', PMClassAAdminState),
                     ('AdminState', AdminState),
                     ('PMClassBAdminState', PMClassBAdminState)):
        if val != None:
            obj[key] = val
    url = self.cfgUrlBase + 'FanSensor'
    if self.authenticate == True:
        return requests.patch(url, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, data=json.dumps(obj), headers=headers, timeout=self.timeout)
def updateFanSensorById(self,
                        objectId,
                        HigherAlarmThreshold = None,
                        HigherWarningThreshold = None,
                        LowerWarningThreshold = None,
                        LowerAlarmThreshold = None,
                        PMClassCAdminState = None,
                        PMClassAAdminState = None,
                        AdminState = None,
                        PMClassBAdminState = None):
    """PATCH the FanSensor addressed by objectId; only non-None fields are sent.

    NOTE(review): thresholds are sent verbatim here (no int() coercion),
    unlike updateFanSensor — presumably intentional in this generated API.
    """
    fields = (('HigherAlarmThreshold', HigherAlarmThreshold),
              ('HigherWarningThreshold', HigherWarningThreshold),
              ('LowerWarningThreshold', LowerWarningThreshold),
              ('LowerAlarmThreshold', LowerAlarmThreshold),
              ('PMClassCAdminState', PMClassCAdminState),
              ('PMClassAAdminState', PMClassAAdminState),
              ('AdminState', AdminState),
              ('PMClassBAdminState', PMClassBAdminState))
    obj = dict((k, v) for k, v in fields if v != None)
    url = self.cfgUrlBase + 'FanSensor' + "/%s" % (objectId,)
    if self.authenticate == True:
        return requests.patch(url, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, data=json.dumps(obj), headers=headers, timeout=self.timeout)
def patchUpdateFanSensor(self,
Name,
op,
path,
value,):
obj = {}
obj['Name'] = Name
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'FanSensor'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getFanSensor(self,
Name):
obj = {
'Name' : Name,
}
reqUrl = self.cfgUrlBase + 'FanSensor'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getFanSensorById(self, objectId ):
reqUrl = self.cfgUrlBase + 'FanSensor'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllFanSensors(self):
        """Return all FanSensor config objects via the generic bulk getter."""
        return self.getObjects('FanSensor', self.cfgUrlBase)
"""
.. automethod :: createIpTableAcl(self,
:param string Name : Ip Table ACL rule name Ip Table ACL rule name
:param string Action : ACCEPT or DROP ACCEPT or DROP
:param string IpAddr : ip address of subnet or host ip address of subnet or host
:param string Protocol :
:param string Port :
:param string PhysicalPort : IfIndex where the acl rule is to be applied IfIndex where the acl rule is to be applied
"""
def createIpTableAcl(self,
Name,
Action,
IpAddr,
Protocol,
Port='all',
PhysicalPort='all'):
obj = {
'Name' : Name,
'Action' : Action,
'IpAddr' : IpAddr,
'Protocol' : Protocol,
'Port' : Port,
'PhysicalPort' : PhysicalPort,
}
reqUrl = self.cfgUrlBase+'IpTableAcl'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteIpTableAcl(self,
Name):
obj = {
'Name' : Name,
}
reqUrl = self.cfgUrlBase+'IpTableAcl'
if self.authenticate == True:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteIpTableAclById(self, objectId ):
reqUrl = self.cfgUrlBase+'IpTableAcl'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
else:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
return r
def updateIpTableAcl(self,
Name,
Action = None,
IpAddr = None,
Protocol = None,
Port = None,
PhysicalPort = None):
obj = {}
if Name != None :
obj['Name'] = Name
if Action != None :
obj['Action'] = Action
if IpAddr != None :
obj['IpAddr'] = IpAddr
if Protocol != None :
obj['Protocol'] = Protocol
if Port != None :
obj['Port'] = Port
if PhysicalPort != None :
obj['PhysicalPort'] = PhysicalPort
reqUrl = self.cfgUrlBase+'IpTableAcl'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateIpTableAclById(self,
objectId,
Action = None,
IpAddr = None,
Protocol = None,
Port = None,
PhysicalPort = None):
obj = {}
if Action != None:
obj['Action'] = Action
if IpAddr != None:
obj['IpAddr'] = IpAddr
if Protocol != None:
obj['Protocol'] = Protocol
if Port != None:
obj['Port'] = Port
if PhysicalPort != None:
obj['PhysicalPort'] = PhysicalPort
reqUrl = self.cfgUrlBase+'IpTableAcl'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdateIpTableAcl(self,
Name,
op,
path,
value,):
obj = {}
obj['Name'] = Name
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'IpTableAcl'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getIpTableAcl(self,
Name):
obj = {
'Name' : Name,
}
reqUrl = self.cfgUrlBase + 'IpTableAcl'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getIpTableAclById(self, objectId ):
reqUrl = self.cfgUrlBase + 'IpTableAcl'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllIpTableAcls(self):
        """Return all IpTableAcl config objects via the generic bulk getter."""
        return self.getObjects('IpTableAcl', self.cfgUrlBase)
def getDHCPv6RelayClientState(self,
MacAddr):
obj = {
'MacAddr' : MacAddr,
}
reqUrl = self.stateUrlBase + 'DHCPv6RelayClient'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getDHCPv6RelayClientStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'DHCPv6RelayClient'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllDHCPv6RelayClientStates(self):
        """Return all DHCPv6RelayClient state objects via the bulk getter."""
        return self.getObjects('DHCPv6RelayClient', self.stateUrlBase)
def getIppLinkState(self,
IntfRef,
DrNameRef):
obj = {
'IntfRef' : IntfRef,
'DrNameRef' : DrNameRef,
}
reqUrl = self.stateUrlBase + 'IppLink'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getIppLinkStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'IppLink'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllIppLinkStates(self):
        """Return all IppLink state objects via the generic bulk getter."""
        return self.getObjects('IppLink', self.stateUrlBase)
"""
.. automethod :: createDHCPRelayIntf(self,
:param string IntfRef : DHCP Client facing interface reference for which Relay Agent needs to be configured DHCP Client facing interface reference for which Relay Agent needs to be configured
:param bool Enable : Interface level config for enabling/disabling the relay agent Interface level config for enabling/disabling the relay agent
:param string ServerIp : DHCP Server(s) where relay agent can relay client dhcp requests DHCP Server(s) where relay agent can relay client dhcp requests
"""
def createDHCPRelayIntf(self,
IntfRef,
Enable,
ServerIp):
obj = {
'IntfRef' : IntfRef,
'Enable' : True if Enable else False,
'ServerIp' : ServerIp,
}
reqUrl = self.cfgUrlBase+'DHCPRelayIntf'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteDHCPRelayIntf(self,
IntfRef):
obj = {
'IntfRef' : IntfRef,
}
reqUrl = self.cfgUrlBase+'DHCPRelayIntf'
if self.authenticate == True:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteDHCPRelayIntfById(self, objectId ):
reqUrl = self.cfgUrlBase+'DHCPRelayIntf'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
else:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
return r
def updateDHCPRelayIntf(self,
IntfRef,
Enable = None,
ServerIp = None):
obj = {}
if IntfRef != None :
obj['IntfRef'] = IntfRef
if Enable != None :
obj['Enable'] = True if Enable else False
if ServerIp != None :
obj['ServerIp'] = ServerIp
reqUrl = self.cfgUrlBase+'DHCPRelayIntf'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateDHCPRelayIntfById(self,
objectId,
Enable = None,
ServerIp = None):
obj = {}
if Enable != None:
obj['Enable'] = Enable
if ServerIp != None:
obj['ServerIp'] = ServerIp
reqUrl = self.cfgUrlBase+'DHCPRelayIntf'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdateDHCPRelayIntf(self,
IntfRef,
op,
path,
value,):
obj = {}
obj['IntfRef'] = IntfRef
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'DHCPRelayIntf'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getDHCPRelayIntf(self,
IntfRef):
obj = {
'IntfRef' : IntfRef,
}
reqUrl = self.cfgUrlBase + 'DHCPRelayIntf'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getDHCPRelayIntfById(self, objectId ):
reqUrl = self.cfgUrlBase + 'DHCPRelayIntf'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllDHCPRelayIntfs(self):
        """Return all DHCPRelayIntf config objects via the bulk getter."""
        return self.getObjects('DHCPRelayIntf', self.cfgUrlBase)
def getDWDMModuleNwIntfState(self,
NwIntfId,
ModuleId):
obj = {
'NwIntfId' : int(NwIntfId),
'ModuleId' : int(ModuleId),
}
reqUrl = self.stateUrlBase + 'DWDMModuleNwIntf'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getDWDMModuleNwIntfStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'DWDMModuleNwIntf'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllDWDMModuleNwIntfStates(self):
        """Return all DWDMModuleNwIntf state objects via the bulk getter."""
        return self.getObjects('DWDMModuleNwIntf', self.stateUrlBase)
"""
.. automethod :: createOspfIfEntry(self,
:param string IfIpAddress : The IP address of this OSPF interface. The IP address of this OSPF interface.
:param int32 AddressLessIf : For the purpose of easing the instancing of addressed and addressless interfaces; this variable takes the value 0 on interfaces with IP addresses and the corresponding value of ifIndex for interfaces having no IP address. For the purpose of easing the instancing of addressed and addressless interfaces; this variable takes the value 0 on interfaces with IP addresses and the corresponding value of ifIndex for interfaces having no IP address.
:param int32 IfAdminStat : Indiacates if OSPF is enabled on this interface Indiacates if OSPF is enabled on this interface
:param string IfAreaId : A 32-bit integer uniquely identifying the area to which the interface connects. Area ID 0.0.0.0 is used for the OSPF backbone. A 32-bit integer uniquely identifying the area to which the interface connects. Area ID 0.0.0.0 is used for the OSPF backbone.
:param string IfType : The OSPF interface type. By way of a default The OSPF interface type. By way of a default
:param int32 IfRtrPriority : The priority of this interface. Used in multi-access networks The priority of this interface. Used in multi-access networks
:param int32 IfTransitDelay : The estimated number of seconds it takes to transmit a link state update packet over this interface. Note that the minimal value SHOULD be 1 second. The estimated number of seconds it takes to transmit a link state update packet over this interface. Note that the minimal value SHOULD be 1 second.
:param int32 IfRetransInterval : The number of seconds between link state advertisement retransmissions The number of seconds between link state advertisement retransmissions
:param int32 IfPollInterval : The larger time interval The larger time interval
:param int32 IfHelloInterval : The length of time The length of time
:param int32 IfRtrDeadInterval : The number of seconds that a router's Hello packets have not been seen before its neighbors declare the router down. This should be some multiple of the Hello interval. This value must be the same for all routers attached to a common network. The number of seconds that a router's Hello packets have not been seen before its neighbors declare the router down. This should be some multiple of the Hello interval. This value must be the same for all routers attached to a common network.
"""
def createOspfIfEntry(self,
IfIpAddress,
AddressLessIf,
IfAdminStat,
IfAreaId,
IfType,
IfRtrPriority,
IfTransitDelay,
IfRetransInterval,
IfPollInterval,
IfHelloInterval=10,
IfRtrDeadInterval=40):
obj = {
'IfIpAddress' : IfIpAddress,
'AddressLessIf' : int(AddressLessIf),
'IfAdminStat' : int(IfAdminStat),
'IfAreaId' : IfAreaId,
'IfType' : IfType,
'IfRtrPriority' : int(IfRtrPriority),
'IfTransitDelay' : int(IfTransitDelay),
'IfRetransInterval' : int(IfRetransInterval),
'IfPollInterval' : int(IfPollInterval),
'IfHelloInterval' : int(IfHelloInterval),
'IfRtrDeadInterval' : int(IfRtrDeadInterval),
}
reqUrl = self.cfgUrlBase+'OspfIfEntry'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteOspfIfEntry(self,
IfIpAddress,
AddressLessIf):
obj = {
'IfIpAddress' : IfIpAddress,
'AddressLessIf' : AddressLessIf,
}
reqUrl = self.cfgUrlBase+'OspfIfEntry'
if self.authenticate == True:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteOspfIfEntryById(self, objectId ):
reqUrl = self.cfgUrlBase+'OspfIfEntry'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
else:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
return r
def updateOspfIfEntry(self,
IfIpAddress,
AddressLessIf,
IfAdminStat = None,
IfAreaId = None,
IfType = None,
IfRtrPriority = None,
IfTransitDelay = None,
IfRetransInterval = None,
IfPollInterval = None,
IfHelloInterval = None,
IfRtrDeadInterval = None):
obj = {}
if IfIpAddress != None :
obj['IfIpAddress'] = IfIpAddress
if AddressLessIf != None :
obj['AddressLessIf'] = int(AddressLessIf)
if IfAdminStat != None :
obj['IfAdminStat'] = int(IfAdminStat)
if IfAreaId != None :
obj['IfAreaId'] = IfAreaId
if IfType != None :
obj['IfType'] = IfType
if IfRtrPriority != None :
obj['IfRtrPriority'] = int(IfRtrPriority)
if IfTransitDelay != None :
obj['IfTransitDelay'] = int(IfTransitDelay)
if IfRetransInterval != None :
obj['IfRetransInterval'] = int(IfRetransInterval)
if IfPollInterval != None :
obj['IfPollInterval'] = int(IfPollInterval)
if IfHelloInterval != None :
obj['IfHelloInterval'] = int(IfHelloInterval)
if IfRtrDeadInterval != None :
obj['IfRtrDeadInterval'] = int(IfRtrDeadInterval)
reqUrl = self.cfgUrlBase+'OspfIfEntry'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateOspfIfEntryById(self,
objectId,
IfAdminStat = None,
IfAreaId = None,
IfType = None,
IfRtrPriority = None,
IfTransitDelay = None,
IfRetransInterval = None,
IfPollInterval = None,
IfHelloInterval = None,
IfRtrDeadInterval = None):
obj = {}
if IfAdminStat != None:
obj['IfAdminStat'] = IfAdminStat
if IfAreaId != None:
obj['IfAreaId'] = IfAreaId
if IfType != None:
obj['IfType'] = IfType
if IfRtrPriority != None:
obj['IfRtrPriority'] = IfRtrPriority
if IfTransitDelay != None:
obj['IfTransitDelay'] = IfTransitDelay
if IfRetransInterval != None:
obj['IfRetransInterval'] = IfRetransInterval
if IfPollInterval != None:
obj['IfPollInterval'] = IfPollInterval
if IfHelloInterval != None:
obj['IfHelloInterval'] = IfHelloInterval
if IfRtrDeadInterval != None:
obj['IfRtrDeadInterval'] = IfRtrDeadInterval
reqUrl = self.cfgUrlBase+'OspfIfEntry'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdateOspfIfEntry(self,
IfIpAddress,
AddressLessIf,
op,
path,
value,):
obj = {}
obj['IfIpAddress'] = IfIpAddress
obj['AddressLessIf'] = AddressLessIf
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'OspfIfEntry'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getOspfIfEntry(self,
IfIpAddress,
AddressLessIf):
obj = {
'IfIpAddress' : IfIpAddress,
'AddressLessIf' : int(AddressLessIf),
}
reqUrl = self.cfgUrlBase + 'OspfIfEntry'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getOspfIfEntryById(self, objectId ):
reqUrl = self.cfgUrlBase + 'OspfIfEntry'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllOspfIfEntrys(self):
        """Return all OspfIfEntry config objects via the bulk getter."""
        return self.getObjects('OspfIfEntry', self.cfgUrlBase)
def getDHCPv6RelayIntfServerState(self,
IntfRef,
ServerIp):
obj = {
'IntfRef' : IntfRef,
'ServerIp' : ServerIp,
}
reqUrl = self.stateUrlBase + 'DHCPv6RelayIntfServer'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getDHCPv6RelayIntfServerStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'DHCPv6RelayIntfServer'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllDHCPv6RelayIntfServerStates(self):
        """Return all DHCPv6RelayIntfServer state objects via the bulk getter."""
        return self.getObjects('DHCPv6RelayIntfServer', self.stateUrlBase)
"""
.. automethod :: createDHCPv6RelayIntf(self,
:param string IntfRef : DHCP Client facing interface reference for which Relay Agent needs to be configured DHCP Client facing interface reference for which Relay Agent needs to be configured
:param bool Enable : Interface level config for enabling/disabling the relay agent Interface level config for enabling/disabling the relay agent
:param string ServerIp : DHCP Server(s) where relay agent can relay client dhcp requests DHCP Server(s) where relay agent can relay client dhcp requests
:param string UpstreamIntfs : DHCP Server facing interfaces where Relay Forward messages are multicasted DHCP Server facing interfaces where Relay Forward messages are multicasted
"""
def createDHCPv6RelayIntf(self,
IntfRef,
Enable,
ServerIp,
UpstreamIntfs):
obj = {
'IntfRef' : IntfRef,
'Enable' : True if Enable else False,
'ServerIp' : ServerIp,
'UpstreamIntfs' : UpstreamIntfs,
}
reqUrl = self.cfgUrlBase+'DHCPv6RelayIntf'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteDHCPv6RelayIntf(self,
IntfRef):
obj = {
'IntfRef' : IntfRef,
}
reqUrl = self.cfgUrlBase+'DHCPv6RelayIntf'
if self.authenticate == True:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteDHCPv6RelayIntfById(self, objectId ):
reqUrl = self.cfgUrlBase+'DHCPv6RelayIntf'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
else:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
return r
def updateDHCPv6RelayIntf(self,
IntfRef,
Enable = None,
ServerIp = None,
UpstreamIntfs = None):
obj = {}
if IntfRef != None :
obj['IntfRef'] = IntfRef
if Enable != None :
obj['Enable'] = True if Enable else False
if ServerIp != None :
obj['ServerIp'] = ServerIp
if UpstreamIntfs != None :
obj['UpstreamIntfs'] = UpstreamIntfs
reqUrl = self.cfgUrlBase+'DHCPv6RelayIntf'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateDHCPv6RelayIntfById(self,
objectId,
Enable = None,
ServerIp = None,
UpstreamIntfs = None):
obj = {}
if Enable != None:
obj['Enable'] = Enable
if ServerIp != None:
obj['ServerIp'] = ServerIp
if UpstreamIntfs != None:
obj['UpstreamIntfs'] = UpstreamIntfs
reqUrl = self.cfgUrlBase+'DHCPv6RelayIntf'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdateDHCPv6RelayIntf(self,
IntfRef,
op,
path,
value,):
obj = {}
obj['IntfRef'] = IntfRef
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'DHCPv6RelayIntf'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getDHCPv6RelayIntf(self,
IntfRef):
obj = {
'IntfRef' : IntfRef,
}
reqUrl = self.cfgUrlBase + 'DHCPv6RelayIntf'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getDHCPv6RelayIntfById(self, objectId ):
reqUrl = self.cfgUrlBase + 'DHCPv6RelayIntf'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllDHCPv6RelayIntfs(self):
        """Return all DHCPv6RelayIntf config objects via the bulk getter."""
        return self.getObjects('DHCPv6RelayIntf', self.cfgUrlBase)
def updateBGPGlobal(self,
Vrf,
DefaultMED = None,
Defaultv4Route = None,
UseMultiplePaths = None,
Defaultv6Route = None,
ASNum = None,
EBGPMaxPaths = None,
EBGPAllowMultipleAS = None,
Disabled = None,
RouterId = None,
IBGPMaxPaths = None,
Redistribution = None):
obj = {}
if Vrf != None :
obj['Vrf'] = Vrf
if DefaultMED != None :
obj['DefaultMED'] = int(DefaultMED)
if Defaultv4Route != None :
obj['Defaultv4Route'] = True if Defaultv4Route else False
if UseMultiplePaths != None :
obj['UseMultiplePaths'] = True if UseMultiplePaths else False
if Defaultv6Route != None :
obj['Defaultv6Route'] = True if Defaultv6Route else False
if ASNum != None :
obj['ASNum'] = ASNum
if EBGPMaxPaths != None :
obj['EBGPMaxPaths'] = int(EBGPMaxPaths)
if EBGPAllowMultipleAS != None :
obj['EBGPAllowMultipleAS'] = True if EBGPAllowMultipleAS else False
if Disabled != None :
obj['Disabled'] = True if Disabled else False
if RouterId != None :
obj['RouterId'] = RouterId
if IBGPMaxPaths != None :
obj['IBGPMaxPaths'] = int(IBGPMaxPaths)
if Redistribution != None :
obj['Redistribution'] = Redistribution
reqUrl = self.cfgUrlBase+'BGPGlobal'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateBGPGlobalById(self,
objectId,
DefaultMED = None,
Defaultv4Route = None,
UseMultiplePaths = None,
Defaultv6Route = None,
ASNum = None,
EBGPMaxPaths = None,
EBGPAllowMultipleAS = None,
Disabled = None,
RouterId = None,
IBGPMaxPaths = None,
Redistribution = None):
obj = {}
if DefaultMED != None:
obj['DefaultMED'] = DefaultMED
if Defaultv4Route != None:
obj['Defaultv4Route'] = Defaultv4Route
if UseMultiplePaths != None:
obj['UseMultiplePaths'] = UseMultiplePaths
if Defaultv6Route != None:
obj['Defaultv6Route'] = Defaultv6Route
if ASNum != None:
obj['ASNum'] = ASNum
if EBGPMaxPaths != None:
obj['EBGPMaxPaths'] = EBGPMaxPaths
if EBGPAllowMultipleAS != None:
obj['EBGPAllowMultipleAS'] = EBGPAllowMultipleAS
if Disabled != None:
obj['Disabled'] = Disabled
if RouterId != None:
obj['RouterId'] = RouterId
if IBGPMaxPaths != None:
obj['IBGPMaxPaths'] = IBGPMaxPaths
if Redistribution != None:
obj['Redistribution'] = Redistribution
reqUrl = self.cfgUrlBase+'BGPGlobal'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdateBGPGlobal(self,
Vrf,
op,
path,
value,):
obj = {}
obj['Vrf'] = Vrf
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'BGPGlobal'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getBGPGlobal(self,
Vrf):
obj = {
'Vrf' : Vrf,
}
reqUrl = self.cfgUrlBase + 'BGPGlobal'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getBGPGlobalById(self, objectId ):
reqUrl = self.cfgUrlBase + 'BGPGlobal'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllBGPGlobals(self):
        """Return all BGPGlobal config objects via the bulk getter."""
        return self.getObjects('BGPGlobal', self.cfgUrlBase)
def updateAclGlobal(self,
AclGlobal,
GlobalDropEnable = None):
obj = {}
if AclGlobal != None :
obj['AclGlobal'] = AclGlobal
if GlobalDropEnable != None :
obj['GlobalDropEnable'] = GlobalDropEnable
reqUrl = self.cfgUrlBase+'AclGlobal'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateAclGlobalById(self,
objectId,
GlobalDropEnable = None):
obj = {}
if GlobalDropEnable != None:
obj['GlobalDropEnable'] = GlobalDropEnable
reqUrl = self.cfgUrlBase+'AclGlobal'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdateAclGlobal(self,
AclGlobal,
op,
path,
value,):
obj = {}
obj['AclGlobal'] = AclGlobal
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'AclGlobal'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getAclGlobal(self,
AclGlobal):
obj = {
'AclGlobal' : AclGlobal,
}
reqUrl = self.cfgUrlBase + 'AclGlobal'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getAclGlobalById(self, objectId ):
reqUrl = self.cfgUrlBase + 'AclGlobal'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllAclGlobals(self):
        """Return all AclGlobal config objects via the bulk getter."""
        return self.getObjects('AclGlobal', self.cfgUrlBase)
def getOspfv2GlobalState(self,
Vrf):
obj = {
'Vrf' : Vrf,
}
reqUrl = self.stateUrlBase + 'Ospfv2Global'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getOspfv2GlobalStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'Ospfv2Global'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllOspfv2GlobalStates(self):
        """Return all Ospfv2Global state objects via the shared getObjects helper."""
        return self.getObjects('Ospfv2Global', self.stateUrlBase)
def getOspfAreaEntryState(self,
AreaId):
obj = {
'AreaId' : AreaId,
}
reqUrl = self.stateUrlBase + 'OspfAreaEntry'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getOspfAreaEntryStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'OspfAreaEntry'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllOspfAreaEntryStates(self):
        """Return all OspfAreaEntry state objects via the shared getObjects helper."""
        return self.getObjects('OspfAreaEntry', self.stateUrlBase)
def getLLDPGlobalState(self,
Vrf):
obj = {
'Vrf' : Vrf,
}
reqUrl = self.stateUrlBase + 'LLDPGlobal'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getLLDPGlobalStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'LLDPGlobal'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllLLDPGlobalStates(self):
        """Return all LLDPGlobal state objects via the shared getObjects helper."""
        return self.getObjects('LLDPGlobal', self.stateUrlBase)
def updateNDPGlobal(self,
Vrf,
RetransmitInterval = None,
RouterAdvertisementInterval = None,
ReachableTime = None):
obj = {}
if Vrf != None :
obj['Vrf'] = Vrf
if RetransmitInterval != None :
obj['RetransmitInterval'] = int(RetransmitInterval)
if RouterAdvertisementInterval != None :
obj['RouterAdvertisementInterval'] = int(RouterAdvertisementInterval)
if ReachableTime != None :
obj['ReachableTime'] = int(ReachableTime)
reqUrl = self.cfgUrlBase+'NDPGlobal'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateNDPGlobalById(self,
objectId,
RetransmitInterval = None,
RouterAdvertisementInterval = None,
ReachableTime = None):
obj = {}
if RetransmitInterval != None:
obj['RetransmitInterval'] = RetransmitInterval
if RouterAdvertisementInterval != None:
obj['RouterAdvertisementInterval'] = RouterAdvertisementInterval
if ReachableTime != None:
obj['ReachableTime'] = ReachableTime
reqUrl = self.cfgUrlBase+'NDPGlobal'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdateNDPGlobal(self,
Vrf,
op,
path,
value,):
obj = {}
obj['Vrf'] = Vrf
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'NDPGlobal'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getNDPGlobal(self,
Vrf):
obj = {
'Vrf' : Vrf,
}
reqUrl = self.cfgUrlBase + 'NDPGlobal'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getNDPGlobalById(self, objectId ):
reqUrl = self.cfgUrlBase + 'NDPGlobal'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllNDPGlobals(self):
        """Return all NDPGlobal config objects via the shared getObjects helper."""
        return self.getObjects('NDPGlobal', self.cfgUrlBase)
def getPsuState(self,
PsuId):
obj = {
'PsuId' : int(PsuId),
}
reqUrl = self.stateUrlBase + 'Psu'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getPsuStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'Psu'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllPsuStates(self):
        """Return all Psu state objects via the shared getObjects helper."""
        return self.getObjects('Psu', self.stateUrlBase)
    """
    .. automethod :: createBfdSession(self,
        :param string IpAddr : BFD neighbor IP address
        :param string Interface : Name of the interface this session has to be established on
        :param string Owner : Module requesting BFD session configuration
        :param bool PerLink : Run BFD sessions on individual links of a LAG if the neighbor is reachable through the LAG
        :param string ParamName : Name of the session parameters object to be applied on this session
    """
def createBfdSession(self,
IpAddr,
Interface='None',
Owner='user',
PerLink=False,
ParamName='default'):
obj = {
'IpAddr' : IpAddr,
'Interface' : Interface,
'Owner' : Owner,
'PerLink' : True if PerLink else False,
'ParamName' : ParamName,
}
reqUrl = self.cfgUrlBase+'BfdSession'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteBfdSession(self,
IpAddr):
obj = {
'IpAddr' : IpAddr,
}
reqUrl = self.cfgUrlBase+'BfdSession'
if self.authenticate == True:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteBfdSessionById(self, objectId ):
reqUrl = self.cfgUrlBase+'BfdSession'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
else:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
return r
def updateBfdSession(self,
IpAddr,
Interface = None,
Owner = None,
PerLink = None,
ParamName = None):
obj = {}
if IpAddr != None :
obj['IpAddr'] = IpAddr
if Interface != None :
obj['Interface'] = Interface
if Owner != None :
obj['Owner'] = Owner
if PerLink != None :
obj['PerLink'] = True if PerLink else False
if ParamName != None :
obj['ParamName'] = ParamName
reqUrl = self.cfgUrlBase+'BfdSession'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateBfdSessionById(self,
objectId,
Interface = None,
Owner = None,
PerLink = None,
ParamName = None):
obj = {}
if Interface != None:
obj['Interface'] = Interface
if Owner != None:
obj['Owner'] = Owner
if PerLink != None:
obj['PerLink'] = PerLink
if ParamName != None:
obj['ParamName'] = ParamName
reqUrl = self.cfgUrlBase+'BfdSession'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdateBfdSession(self,
IpAddr,
op,
path,
value,):
obj = {}
obj['IpAddr'] = IpAddr
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'BfdSession'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getBfdSession(self,
IpAddr):
obj = {
'IpAddr' : IpAddr,
}
reqUrl = self.cfgUrlBase + 'BfdSession'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getBfdSessionById(self, objectId ):
reqUrl = self.cfgUrlBase + 'BfdSession'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllBfdSessions(self):
        """Return all BfdSession config objects via the shared getObjects helper."""
        return self.getObjects('BfdSession', self.cfgUrlBase)
def getSubIPv4IntfState(self,
IntfRef,
Type):
obj = {
'IntfRef' : IntfRef,
'Type' : Type,
}
reqUrl = self.stateUrlBase + 'SubIPv4Intf'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getSubIPv4IntfStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'SubIPv4Intf'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllSubIPv4IntfStates(self):
        """Return all SubIPv4Intf state objects via the shared getObjects helper."""
        return self.getObjects('SubIPv4Intf', self.stateUrlBase)
def getPolicyConditionState(self,
Name):
obj = {
'Name' : Name,
}
reqUrl = self.stateUrlBase + 'PolicyCondition'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getPolicyConditionStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'PolicyCondition'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllPolicyConditionStates(self):
        """Return all PolicyCondition state objects via the shared getObjects helper."""
        return self.getObjects('PolicyCondition', self.stateUrlBase)
def getStpPortState(self,
IntfRef,
Vlan):
obj = {
'IntfRef' : IntfRef,
'Vlan' : int(Vlan),
}
reqUrl = self.stateUrlBase + 'StpPort'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getStpPortStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'StpPort'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllStpPortStates(self):
        """Return all StpPort state objects via the shared getObjects helper."""
        return self.getObjects('StpPort', self.stateUrlBase)
def getXponderGlobalState(self,
XponderId):
obj = {
'XponderId' : int(XponderId),
}
reqUrl = self.stateUrlBase + 'XponderGlobal'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getXponderGlobalStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'XponderGlobal'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllXponderGlobalStates(self):
        """Return all XponderGlobal state objects via the shared getObjects helper."""
        return self.getObjects('XponderGlobal', self.stateUrlBase)
def updateLLDPGlobal(self,
Vrf,
TxRxMode = None,
SnoopAndDrop = None,
Enable = None,
TranmitInterval = None):
obj = {}
if Vrf != None :
obj['Vrf'] = Vrf
if TxRxMode != None :
obj['TxRxMode'] = TxRxMode
if SnoopAndDrop != None :
obj['SnoopAndDrop'] = True if SnoopAndDrop else False
if Enable != None :
obj['Enable'] = True if Enable else False
if TranmitInterval != None :
obj['TranmitInterval'] = int(TranmitInterval)
reqUrl = self.cfgUrlBase+'LLDPGlobal'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateLLDPGlobalById(self,
objectId,
TxRxMode = None,
SnoopAndDrop = None,
Enable = None,
TranmitInterval = None):
obj = {}
if TxRxMode != None:
obj['TxRxMode'] = TxRxMode
if SnoopAndDrop != None:
obj['SnoopAndDrop'] = SnoopAndDrop
if Enable != None:
obj['Enable'] = Enable
if TranmitInterval != None:
obj['TranmitInterval'] = TranmitInterval
reqUrl = self.cfgUrlBase+'LLDPGlobal'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdateLLDPGlobal(self,
Vrf,
op,
path,
value,):
obj = {}
obj['Vrf'] = Vrf
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'LLDPGlobal'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getLLDPGlobal(self,
Vrf):
obj = {
'Vrf' : Vrf,
}
reqUrl = self.cfgUrlBase + 'LLDPGlobal'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getLLDPGlobalById(self, objectId ):
reqUrl = self.cfgUrlBase + 'LLDPGlobal'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllLLDPGlobals(self):
        """Return all LLDPGlobal config objects via the shared getObjects helper."""
        return self.getObjects('LLDPGlobal', self.cfgUrlBase)
def getIPv6RouteHwState(self,
DestinationNw):
obj = {
'DestinationNw' : DestinationNw,
}
reqUrl = self.stateUrlBase + 'IPv6RouteHw'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getIPv6RouteHwStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'IPv6RouteHw'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllIPv6RouteHwStates(self):
        """Return all IPv6RouteHw state objects via the shared getObjects helper."""
        return self.getObjects('IPv6RouteHw', self.stateUrlBase)
    """
    .. automethod :: createSubIPv4Intf(self,
        :param string Type : Type of interface
        :param string IntfRef : Intf name for which the ipv4Intf sub interface is to be configured
        :param string IpAddr : IP address for the sub interface
        :param string MacAddr : MAC address to be used for the sub interface; if none is specified the IPv4Intf MAC address will be used
        :param bool Enable : Enable or disable this interface
    """
def createSubIPv4Intf(self,
Type,
IntfRef,
IpAddr,
MacAddr='',
Enable=True):
obj = {
'Type' : Type,
'IntfRef' : IntfRef,
'IpAddr' : IpAddr,
'MacAddr' : MacAddr,
'Enable' : True if Enable else False,
}
reqUrl = self.cfgUrlBase+'SubIPv4Intf'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteSubIPv4Intf(self,
Type,
IntfRef):
obj = {
'Type' : Type,
'IntfRef' : IntfRef,
}
reqUrl = self.cfgUrlBase+'SubIPv4Intf'
if self.authenticate == True:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteSubIPv4IntfById(self, objectId ):
reqUrl = self.cfgUrlBase+'SubIPv4Intf'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
else:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
return r
def updateSubIPv4Intf(self,
Type,
IntfRef,
IpAddr = None,
MacAddr = None,
Enable = None):
obj = {}
if Type != None :
obj['Type'] = Type
if IntfRef != None :
obj['IntfRef'] = IntfRef
if IpAddr != None :
obj['IpAddr'] = IpAddr
if MacAddr != None :
obj['MacAddr'] = MacAddr
if Enable != None :
obj['Enable'] = True if Enable else False
reqUrl = self.cfgUrlBase+'SubIPv4Intf'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateSubIPv4IntfById(self,
objectId,
IpAddr = None,
MacAddr = None,
Enable = None):
obj = {}
if IpAddr != None:
obj['IpAddr'] = IpAddr
if MacAddr != None:
obj['MacAddr'] = MacAddr
if Enable != None:
obj['Enable'] = Enable
reqUrl = self.cfgUrlBase+'SubIPv4Intf'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdateSubIPv4Intf(self,
Type,
IntfRef,
op,
path,
value,):
obj = {}
obj['Type'] = Type
obj['IntfRef'] = IntfRef
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'SubIPv4Intf'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getSubIPv4Intf(self,
Type,
IntfRef):
obj = {
'Type' : Type,
'IntfRef' : IntfRef,
}
reqUrl = self.cfgUrlBase + 'SubIPv4Intf'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getSubIPv4IntfById(self, objectId ):
reqUrl = self.cfgUrlBase + 'SubIPv4Intf'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllSubIPv4Intfs(self):
        """Return all SubIPv4Intf config objects via the shared getObjects helper."""
        return self.getObjects('SubIPv4Intf', self.cfgUrlBase)
def getSfpState(self,
SfpId):
obj = {
'SfpId' : int(SfpId),
}
reqUrl = self.stateUrlBase + 'Sfp'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getSfpStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'Sfp'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllSfpStates(self):
        """Return all Sfp state objects via the shared getObjects helper."""
        return self.getObjects('Sfp', self.stateUrlBase)
def getPolicyDefinitionState(self,
Name):
obj = {
'Name' : Name,
}
reqUrl = self.stateUrlBase + 'PolicyDefinition'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getPolicyDefinitionStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'PolicyDefinition'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllPolicyDefinitionStates(self):
        """Return all PolicyDefinition state objects via the shared getObjects helper."""
        return self.getObjects('PolicyDefinition', self.stateUrlBase)
def getVlanState(self,
VlanId):
obj = {
'VlanId' : int(VlanId),
}
reqUrl = self.stateUrlBase + 'Vlan'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getVlanStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'Vlan'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllVlanStates(self):
        """Return all Vlan state objects via the shared getObjects helper."""
        return self.getObjects('Vlan', self.stateUrlBase)
    """
    .. automethod :: executeApplyConfigByFile(self,
        :param string FileName : File name of the config to be applied
    """
def executeApplyConfigByFile(self,
FileName='startup-config'):
obj = {
'FileName' : FileName,
}
reqUrl = self.actionUrlBase+'ApplyConfigByFile'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers)
return r
def getIsisGlobalState(self,
Vrf):
obj = {
'Vrf' : Vrf,
}
reqUrl = self.stateUrlBase + 'IsisGlobal'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getIsisGlobalStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'IsisGlobal'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllIsisGlobalStates(self):
        """Return all IsisGlobal state objects via the shared getObjects helper."""
        return self.getObjects('IsisGlobal', self.stateUrlBase)
def getQsfpChannelPMDataState(self,
ChannelNum,
Class,
Resource,
QsfpId):
obj = {
'ChannelNum' : int(ChannelNum),
'Class' : Class,
'Resource' : Resource,
'QsfpId' : int(QsfpId),
}
reqUrl = self.stateUrlBase + 'QsfpChannelPMData'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getQsfpChannelPMDataStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'QsfpChannelPMData'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllQsfpChannelPMDataStates(self):
        """Return all QsfpChannelPMData state objects via the shared getObjects helper."""
        return self.getObjects('QsfpChannelPMData', self.stateUrlBase)
    """
    .. automethod :: createBGPv6Aggregate(self,
        :param string IpPrefix : IPv6 prefix in CIDR format to match
        :param bool SendSummaryOnly : Send the summary route only when aggregating routes
        :param bool GenerateASSet : Generate an AS set when aggregating routes
    """
def createBGPv6Aggregate(self,
IpPrefix,
SendSummaryOnly=False,
GenerateASSet=False):
obj = {
'IpPrefix' : IpPrefix,
'SendSummaryOnly' : True if SendSummaryOnly else False,
'GenerateASSet' : True if GenerateASSet else False,
}
reqUrl = self.cfgUrlBase+'BGPv6Aggregate'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteBGPv6Aggregate(self,
IpPrefix):
obj = {
'IpPrefix' : IpPrefix,
}
reqUrl = self.cfgUrlBase+'BGPv6Aggregate'
if self.authenticate == True:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteBGPv6AggregateById(self, objectId ):
reqUrl = self.cfgUrlBase+'BGPv6Aggregate'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
else:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
return r
def updateBGPv6Aggregate(self,
IpPrefix,
SendSummaryOnly = None,
GenerateASSet = None):
obj = {}
if IpPrefix != None :
obj['IpPrefix'] = IpPrefix
if SendSummaryOnly != None :
obj['SendSummaryOnly'] = True if SendSummaryOnly else False
if GenerateASSet != None :
obj['GenerateASSet'] = True if GenerateASSet else False
reqUrl = self.cfgUrlBase+'BGPv6Aggregate'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateBGPv6AggregateById(self,
objectId,
SendSummaryOnly = None,
GenerateASSet = None):
obj = {}
if SendSummaryOnly != None:
obj['SendSummaryOnly'] = SendSummaryOnly
if GenerateASSet != None:
obj['GenerateASSet'] = GenerateASSet
reqUrl = self.cfgUrlBase+'BGPv6Aggregate'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdateBGPv6Aggregate(self,
IpPrefix,
op,
path,
value,):
obj = {}
obj['IpPrefix'] = IpPrefix
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'BGPv6Aggregate'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getBGPv6Aggregate(self,
IpPrefix):
obj = {
'IpPrefix' : IpPrefix,
}
reqUrl = self.cfgUrlBase + 'BGPv6Aggregate'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getBGPv6AggregateById(self, objectId ):
reqUrl = self.cfgUrlBase + 'BGPv6Aggregate'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllBGPv6Aggregates(self):
        """Return all BGPv6Aggregate config objects via the shared getObjects helper."""
        return self.getObjects('BGPv6Aggregate', self.cfgUrlBase)
    """
    .. automethod :: executeDWDMModuleFWDownload(self,
        :param uint8 ModuleId : DWDM module identifier
        :param string FileName : Firmware file name or absolute file location
    """
def executeDWDMModuleFWDownload(self,
ModuleId,
FileName):
obj = {
'ModuleId' : int(ModuleId),
'FileName' : FileName,
}
reqUrl = self.actionUrlBase+'DWDMModuleFWDownload'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers)
return r
def getThermalState(self,
ThermalId):
obj = {
'ThermalId' : int(ThermalId),
}
reqUrl = self.stateUrlBase + 'Thermal'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getThermalStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'Thermal'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
    def getAllThermalStates(self):
        """Return all Thermal state objects via the shared getObjects helper."""
        return self.getObjects('Thermal', self.stateUrlBase)
    """
    .. automethod :: createPolicyPrefixSet(self,
        :param string Name : Policy prefix set name
        :param PolicyPrefix PrefixList : List of policy prefixes that are part of this prefix set
    """
def createPolicyPrefixSet(self,
Name,
PrefixList):
obj = {
'Name' : Name,
'PrefixList' : PrefixList,
}
reqUrl = self.cfgUrlBase+'PolicyPrefixSet'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deletePolicyPrefixSet(self,
Name):
obj = {
'Name' : Name,
}
reqUrl = self.cfgUrlBase+'PolicyPrefixSet'
if self.authenticate == True:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deletePolicyPrefixSetById(self, objectId):
    """DELETE the PolicyPrefixSet identified by *objectId*.

    Fix: the two branches were identical — the authenticated branch was
    missing ``auth``/``verify`` (and both lacked them), unlike every other
    authenticated request in this client. Credentials are now sent when
    ``self.authenticate`` is enabled.
    """
    reqUrl = self.cfgUrlBase + 'PolicyPrefixSet' + "/%s" % (objectId)
    if self.authenticate == True:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updatePolicyPrefixSet(self, Name, PrefixList=None):
    """PATCH a PolicyPrefixSet; only non-None fields are included in the payload."""
    fields = {}
    if Name != None:
        fields['Name'] = Name
    if PrefixList != None:
        fields['PrefixList'] = PrefixList
    url = self.cfgUrlBase + 'PolicyPrefixSet'
    kw = {'data': json.dumps(fields), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.patch(url, **kw)
def updatePolicyPrefixSetById(self, objectId, PrefixList=None):
    """PATCH the PolicyPrefixSet identified by *objectId* (non-None fields only)."""
    fields = {}
    if PrefixList != None:
        fields['PrefixList'] = PrefixList
    url = self.cfgUrlBase + 'PolicyPrefixSet' + "/%s" % (objectId)
    kw = {'data': json.dumps(fields), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.patch(url, **kw)
def patchUpdatePolicyPrefixSet(self, Name, op, path, value):
    """Apply a JSON-patch style (op/path/value) update to the keyed PolicyPrefixSet."""
    body = json.dumps({'Name': Name,
                       'patch': [{'op': op, 'path': path, 'value': value}]})
    url = self.cfgUrlBase + 'PolicyPrefixSet'
    # note: patch-style updates use the dedicated patchheaders content type
    kw = {'data': body, 'headers': patchheaders, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.patch(url, **kw)
def getPolicyPrefixSet(self, Name):
    """GET the PolicyPrefixSet config object keyed by *Name*."""
    url = self.cfgUrlBase + 'PolicyPrefixSet'
    body = json.dumps({'Name': Name})
    kw = {'data': body, 'headers': headers, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.get(url, **kw)
def getPolicyPrefixSetById(self, objectId):
    """GET a single PolicyPrefixSet config object by its opaque object id."""
    url = self.cfgUrlBase + 'PolicyPrefixSet' + "/%s" % (objectId)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllPolicyPrefixSets(self):
    """Fetch every PolicyPrefixSet config object via the shared bulk-get helper."""
    return self.getObjects('PolicyPrefixSet', self.cfgUrlBase)
def updateVxlanGlobal(self, Vrf, AdminState=None):
    """PATCH the VxlanGlobal config; only non-None fields are sent."""
    fields = {}
    if Vrf != None:
        fields['Vrf'] = Vrf
    if AdminState != None:
        fields['AdminState'] = AdminState
    url = self.cfgUrlBase + 'VxlanGlobal'
    kw = {'data': json.dumps(fields), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.patch(url, **kw)
def updateVxlanGlobalById(self, objectId, AdminState=None):
    """PATCH the VxlanGlobal identified by *objectId* (non-None fields only)."""
    fields = {}
    if AdminState != None:
        fields['AdminState'] = AdminState
    url = self.cfgUrlBase + 'VxlanGlobal' + "/%s" % (objectId)
    kw = {'data': json.dumps(fields), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.patch(url, **kw)
def patchUpdateVxlanGlobal(self, Vrf, op, path, value):
    """Apply a JSON-patch style (op/path/value) update to the keyed VxlanGlobal."""
    body = json.dumps({'Vrf': Vrf,
                       'patch': [{'op': op, 'path': path, 'value': value}]})
    url = self.cfgUrlBase + 'VxlanGlobal'
    kw = {'data': body, 'headers': patchheaders, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.patch(url, **kw)
def getVxlanGlobal(self, Vrf):
    """GET the VxlanGlobal config object keyed by *Vrf*."""
    url = self.cfgUrlBase + 'VxlanGlobal'
    body = json.dumps({'Vrf': Vrf})
    kw = {'data': body, 'headers': headers, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.get(url, **kw)
def getVxlanGlobalById(self, objectId):
    """GET a single VxlanGlobal config object by its opaque object id."""
    url = self.cfgUrlBase + 'VxlanGlobal' + "/%s" % (objectId)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllVxlanGlobals(self):
    """Fetch every VxlanGlobal config object via the shared bulk-get helper."""
    return self.getObjects('VxlanGlobal', self.cfgUrlBase)
def getLinkScopeIpState(self, LinkScopeIp):
    """GET the LinkScopeIp state object keyed by *LinkScopeIp*."""
    url = self.stateUrlBase + 'LinkScopeIp'
    body = json.dumps({'LinkScopeIp': LinkScopeIp})
    kw = {'data': body, 'headers': headers, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.get(url, **kw)
def getLinkScopeIpStateById(self, objectId):
    """GET a single LinkScopeIp state object by its opaque object id."""
    url = self.stateUrlBase + 'LinkScopeIp' + "/%s" % (objectId)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllLinkScopeIpStates(self):
    """Fetch every LinkScopeIp state object via the shared bulk-get helper."""
    return self.getObjects('LinkScopeIp', self.stateUrlBase)
def getPowerConverterSensorState(self, Name):
    """GET the PowerConverterSensor state object keyed by *Name*."""
    url = self.stateUrlBase + 'PowerConverterSensor'
    body = json.dumps({'Name': Name})
    kw = {'data': body, 'headers': headers, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.get(url, **kw)
def getPowerConverterSensorStateById(self, objectId):
    """GET a single PowerConverterSensor state object by its opaque object id."""
    url = self.stateUrlBase + 'PowerConverterSensor' + "/%s" % (objectId)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllPowerConverterSensorStates(self):
    """Fetch every PowerConverterSensor state object via the shared bulk-get helper."""
    return self.getObjects('PowerConverterSensor', self.stateUrlBase)
"""
.. automethod :: executeArpDeleteByIfName(self,
:param string IfName : All the Arp learned for end host on given L3 interface will be deleted All the Arp learned for end host on given L3 interface will be deleted
"""
def executeArpDeleteByIfName(self, IfName):
    """POST the ArpDeleteByIfName action for the given L3 interface.

    Action posts intentionally carry no timeout (matching the other
    execute* methods in this client).
    """
    url = self.actionUrlBase + 'ArpDeleteByIfName'
    body = json.dumps({'IfName': IfName})
    if self.authenticate == True:
        return requests.post(url, data=body, headers=headers,
                             auth=(self.user, self.passwd), verify=False)
    return requests.post(url, data=body, headers=headers)
def getVxlanGlobalState(self, Vrf):
    """GET the VxlanGlobal state object keyed by *Vrf*."""
    url = self.stateUrlBase + 'VxlanGlobal'
    body = json.dumps({'Vrf': Vrf})
    kw = {'data': body, 'headers': headers, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.get(url, **kw)
def getVxlanGlobalStateById(self, objectId):
    """GET a single VxlanGlobal state object by its opaque object id."""
    url = self.stateUrlBase + 'VxlanGlobal' + "/%s" % (objectId)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllVxlanGlobalStates(self):
    """Fetch every VxlanGlobal state object via the shared bulk-get helper."""
    return self.getObjects('VxlanGlobal', self.stateUrlBase)
"""
.. automethod :: createStpBridgeInstance(self,
:param uint16 Vlan : Each bridge is associated with a domain. Typically this domain is represented as the vlan; The default domain is 4095 Each bridge is associated with a domain. Typically this domain is represented as the vlan; The default domain is 4095
:param int32 HelloTime : The value that all bridges use for HelloTime when this bridge is acting as the root. The granularity of this timer is specified by 802.1D-1998 to be 1 second. An agent may return a badValue error if a set is attempted to a value that is not a whole number of seconds. The value that all bridges use for HelloTime when this bridge is acting as the root. The granularity of this timer is specified by 802.1D-1998 to be 1 second. An agent may return a badValue error if a set is attempted to a value that is not a whole number of seconds.
:param int32 ForwardDelay : The value that all bridges use for ForwardDelay when this bridge is acting as the root. Note that 802.1D-1998 specifies that the range for this parameter is related to the value of MaxAge. The granularity of this timer is specified by 802.1D-1998 to be 1 second. An agent may return a badValue error if a set is attempted to a value that is not a whole number of seconds. The value that all bridges use for ForwardDelay when this bridge is acting as the root. Note that 802.1D-1998 specifies that the range for this parameter is related to the value of MaxAge. The granularity of this timer is specified by 802.1D-1998 to be 1 second. An agent may return a badValue error if a set is attempted to a value that is not a whole number of seconds.
:param int32 MaxAge : The value that all bridges use for MaxAge when this bridge is acting as the root. Note that 802.1D-1998 specifies that the range for this parameter is related to the value of HelloTime. The granularity of this timer is specified by 802.1D-1998 to be 1 second. An agent may return a badValue error if a set is attempted to a value that is not a whole number of seconds. The value that all bridges use for MaxAge when this bridge is acting as the root. Note that 802.1D-1998 specifies that the range for this parameter is related to the value of HelloTime. The granularity of this timer is specified by 802.1D-1998 to be 1 second. An agent may return a badValue error if a set is attempted to a value that is not a whole number of seconds.
:param int32 TxHoldCount : Configures the number of BPDUs that can be sent before pausing for 1 second. Configures the number of BPDUs that can be sent before pausing for 1 second.
:param int32 Priority : The value of the write-able portion of the Bridge ID i.e. the first two octets of the 8 octet long Bridge ID. The other last 6 octets of the Bridge ID are given by the value of Address. On bridges supporting IEEE 802.1t or IEEE 802.1w permissible values are 0-61440 in steps of 4096. Extended Priority is enabled when the lower 12 bits are set using the Bridges VLAN id The value of the write-able portion of the Bridge ID i.e. the first two octets of the 8 octet long Bridge ID. The other last 6 octets of the Bridge ID are given by the value of Address. On bridges supporting IEEE 802.1t or IEEE 802.1w permissible values are 0-61440 in steps of 4096. Extended Priority is enabled when the lower 12 bits are set using the Bridges VLAN id
:param int32 ForceVersion : Stp Version Stp Version
:param string Address : The bridge identifier of the root of the spanning tree as determined by the Spanning Tree Protocol as executed by this node. This value is used as the Root Identifier parameter in all Configuration Bridge PDUs originated by this node. The bridge identifier of the root of the spanning tree as determined by the Spanning Tree Protocol as executed by this node. This value is used as the Root Identifier parameter in all Configuration Bridge PDUs originated by this node.
"""
def createStpBridgeInstance(self, Vlan, HelloTime=2, ForwardDelay=15, MaxAge=20,
                            TxHoldCount=6, Priority=32768, ForceVersion=2,
                            Address='00-00-00-00-00-00'):
    """POST a new StpBridgeInstance config object (one bridge per vlan domain)."""
    body = json.dumps({
        'Vlan': int(Vlan),
        'HelloTime': int(HelloTime),
        'ForwardDelay': int(ForwardDelay),
        'MaxAge': int(MaxAge),
        'TxHoldCount': int(TxHoldCount),
        'Priority': int(Priority),
        'ForceVersion': int(ForceVersion),
        'Address': Address,
    })
    url = self.cfgUrlBase + 'StpBridgeInstance'
    kw = {'data': body, 'headers': headers, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.post(url, **kw)
def deleteStpBridgeInstance(self, Vlan):
    """DELETE the StpBridgeInstance config object keyed by *Vlan*."""
    url = self.cfgUrlBase + 'StpBridgeInstance'
    body = json.dumps({'Vlan': Vlan})
    kw = {'data': body, 'headers': headers, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.delete(url, **kw)
def deleteStpBridgeInstanceById(self, objectId):
    """DELETE the StpBridgeInstance identified by *objectId*.

    Fix: both branches were identical — credentials (``auth``/``verify``)
    were never sent even when ``self.authenticate`` was enabled, unlike
    every other authenticated request in this client.
    """
    reqUrl = self.cfgUrlBase + 'StpBridgeInstance' + "/%s" % (objectId)
    if self.authenticate == True:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updateStpBridgeInstance(self, Vlan, HelloTime=None, ForwardDelay=None,
                            MaxAge=None, TxHoldCount=None, Priority=None,
                            ForceVersion=None, Address=None):
    """PATCH an StpBridgeInstance; only fields passed as non-None are sent.

    All numeric fields are coerced to int before serialization.
    """
    fields = {}
    for key, val in (('Vlan', Vlan), ('HelloTime', HelloTime),
                     ('ForwardDelay', ForwardDelay), ('MaxAge', MaxAge),
                     ('TxHoldCount', TxHoldCount), ('Priority', Priority),
                     ('ForceVersion', ForceVersion)):
        if val != None:
            fields[key] = int(val)
    if Address != None:
        fields['Address'] = Address
    url = self.cfgUrlBase + 'StpBridgeInstance'
    kw = {'data': json.dumps(fields), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.patch(url, **kw)
def updateStpBridgeInstanceById(self, objectId, HelloTime=None, ForwardDelay=None,
                                MaxAge=None, TxHoldCount=None, Priority=None,
                                ForceVersion=None, Address=None):
    """PATCH the StpBridgeInstance identified by *objectId* (non-None fields only).

    NOTE(review): unlike updateStpBridgeInstance, the *ById variants in this
    generated client pass numeric fields through without int() coercion —
    preserved as-is.
    """
    fields = {k: v for k, v in (('HelloTime', HelloTime),
                                ('ForwardDelay', ForwardDelay),
                                ('MaxAge', MaxAge),
                                ('TxHoldCount', TxHoldCount),
                                ('Priority', Priority),
                                ('ForceVersion', ForceVersion),
                                ('Address', Address)) if v != None}
    url = self.cfgUrlBase + 'StpBridgeInstance' + "/%s" % (objectId)
    kw = {'data': json.dumps(fields), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.patch(url, **kw)
def patchUpdateStpBridgeInstance(self, Vlan, op, path, value):
    """Apply a JSON-patch style (op/path/value) update to the keyed StpBridgeInstance."""
    body = json.dumps({'Vlan': Vlan,
                       'patch': [{'op': op, 'path': path, 'value': value}]})
    url = self.cfgUrlBase + 'StpBridgeInstance'
    kw = {'data': body, 'headers': patchheaders, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.patch(url, **kw)
def getStpBridgeInstance(self, Vlan):
    """GET the StpBridgeInstance config object keyed by *Vlan*."""
    url = self.cfgUrlBase + 'StpBridgeInstance'
    body = json.dumps({'Vlan': int(Vlan)})
    kw = {'data': body, 'headers': headers, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.get(url, **kw)
def getStpBridgeInstanceById(self, objectId):
    """GET a single StpBridgeInstance config object by its opaque object id."""
    url = self.cfgUrlBase + 'StpBridgeInstance' + "/%s" % (objectId)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllStpBridgeInstances(self):
    """Fetch every StpBridgeInstance config object via the shared bulk-get helper."""
    return self.getObjects('StpBridgeInstance', self.cfgUrlBase)
"""
.. automethod :: executeNdpRefreshByIfName(self,
:param string IfName : All the NDP learned on given L3 interface will be re-learned All the NDP learned on given L3 interface will be re-learned
"""
def executeNdpRefreshByIfName(self, IfName):
    """POST the NdpRefreshByIfName action for the given L3 interface (no timeout, by design)."""
    url = self.actionUrlBase + 'NdpRefreshByIfName'
    body = json.dumps({'IfName': IfName})
    if self.authenticate == True:
        return requests.post(url, data=body, headers=headers,
                             auth=(self.user, self.passwd), verify=False)
    return requests.post(url, data=body, headers=headers)
def getStpBridgeInstanceState(self, Vlan):
    """GET the StpBridgeInstance state object keyed by *Vlan*."""
    url = self.stateUrlBase + 'StpBridgeInstance'
    body = json.dumps({'Vlan': int(Vlan)})
    kw = {'data': body, 'headers': headers, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.get(url, **kw)
def getStpBridgeInstanceStateById(self, objectId):
    """GET a single StpBridgeInstance state object by its opaque object id."""
    url = self.stateUrlBase + 'StpBridgeInstance' + "/%s" % (objectId)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllStpBridgeInstanceStates(self):
    """Fetch every StpBridgeInstance state object via the shared bulk-get helper."""
    return self.getObjects('StpBridgeInstance', self.stateUrlBase)
def getAsicSummaryState(self, ModuleId):
    """GET the AsicSummary state object keyed by *ModuleId*."""
    url = self.stateUrlBase + 'AsicSummary'
    body = json.dumps({'ModuleId': int(ModuleId)})
    kw = {'data': body, 'headers': headers, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.get(url, **kw)
def getAsicSummaryStateById(self, objectId):
    """GET a single AsicSummary state object by its opaque object id."""
    url = self.stateUrlBase + 'AsicSummary' + "/%s" % (objectId)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllAsicSummaryStates(self):
    """Fetch every AsicSummary state object via the shared bulk-get helper."""
    return self.getObjects('AsicSummary', self.stateUrlBase)
"""
.. automethod :: executeForceApplyConfigByFile(self,
:param string FileName : FileName for the config to be applied FileName for the config to be applied
"""
def executeForceApplyConfigByFile(self, FileName='startup-config'):
    """POST the ForceApplyConfigByFile action (no timeout, by design)."""
    url = self.actionUrlBase + 'ForceApplyConfigByFile'
    body = json.dumps({'FileName': FileName})
    if self.authenticate == True:
        return requests.post(url, data=body, headers=headers,
                             auth=(self.user, self.passwd), verify=False)
    return requests.post(url, data=body, headers=headers)
"""
.. automethod :: executeGlobalLogging(self,
:param string Level : Logging level Logging level
"""
def executeGlobalLogging(self, Level='info'):
    """POST the GlobalLogging action to set the global logging level (no timeout, by design)."""
    url = self.actionUrlBase + 'GlobalLogging'
    body = json.dumps({'Level': Level})
    if self.authenticate == True:
        return requests.post(url, data=body, headers=headers,
                             auth=(self.user, self.passwd), verify=False)
    return requests.post(url, data=body, headers=headers)
"""
.. automethod :: executeResetBGPv4NeighborByIPAddr(self,
:param string IPAddr : IP address of the BGP IPv4 neighbor to restart IP address of the BGP IPv4 neighbor to restart
"""
def executeResetBGPv4NeighborByIPAddr(self, IPAddr):
    """POST the ResetBGPv4NeighborByIPAddr action (no timeout, by design)."""
    url = self.actionUrlBase + 'ResetBGPv4NeighborByIPAddr'
    body = json.dumps({'IPAddr': IPAddr})
    if self.authenticate == True:
        return requests.post(url, data=body, headers=headers,
                             auth=(self.user, self.passwd), verify=False)
    return requests.post(url, data=body, headers=headers)
def updateIsisGlobal(self, Vrf, Enable=None):
    """PATCH the IsisGlobal config; only non-None fields are sent (Enable coerced to bool)."""
    fields = {}
    if Vrf != None:
        fields['Vrf'] = Vrf
    if Enable != None:
        fields['Enable'] = bool(Enable)
    url = self.cfgUrlBase + 'IsisGlobal'
    kw = {'data': json.dumps(fields), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.patch(url, **kw)
def updateIsisGlobalById(self, objectId, Enable=None):
    """PATCH the IsisGlobal identified by *objectId* (non-None fields only)."""
    fields = {}
    if Enable != None:
        fields['Enable'] = Enable
    url = self.cfgUrlBase + 'IsisGlobal' + "/%s" % (objectId)
    kw = {'data': json.dumps(fields), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.patch(url, **kw)
def patchUpdateIsisGlobal(self, Vrf, op, path, value):
    """Apply a JSON-patch style (op/path/value) update to the keyed IsisGlobal."""
    body = json.dumps({'Vrf': Vrf,
                       'patch': [{'op': op, 'path': path, 'value': value}]})
    url = self.cfgUrlBase + 'IsisGlobal'
    kw = {'data': body, 'headers': patchheaders, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.patch(url, **kw)
def getIsisGlobal(self, Vrf):
    """GET the IsisGlobal config object keyed by *Vrf*."""
    url = self.cfgUrlBase + 'IsisGlobal'
    body = json.dumps({'Vrf': Vrf})
    kw = {'data': body, 'headers': headers, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.get(url, **kw)
def getIsisGlobalById(self, objectId):
    """GET a single IsisGlobal config object by its opaque object id."""
    url = self.cfgUrlBase + 'IsisGlobal' + "/%s" % (objectId)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllIsisGlobals(self):
    """Fetch every IsisGlobal config object via the shared bulk-get helper."""
    return self.getObjects('IsisGlobal', self.cfgUrlBase)
"""
.. automethod :: executeNdpDeleteByIPv6Addr(self,
:param string IpAddr : End Host IPV6 Address for which corresponding NDP entry needs to be deleted End Host IPV6 Address for which corresponding NDP entry needs to be deleted
"""
def executeNdpDeleteByIPv6Addr(self, IpAddr):
    """POST the NdpDeleteByIPv6Addr action (no timeout, by design)."""
    url = self.actionUrlBase + 'NdpDeleteByIPv6Addr'
    body = json.dumps({'IpAddr': IpAddr})
    if self.authenticate == True:
        return requests.post(url, data=body, headers=headers,
                             auth=(self.user, self.passwd), verify=False)
    return requests.post(url, data=body, headers=headers)
def getLedState(self, LedId):
    """GET the Led state object keyed by *LedId*."""
    url = self.stateUrlBase + 'Led'
    body = json.dumps({'LedId': int(LedId)})
    kw = {'data': body, 'headers': headers, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.get(url, **kw)
def getLedStateById(self, objectId):
    """GET a single Led state object by its opaque object id."""
    url = self.stateUrlBase + 'Led' + "/%s" % (objectId)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllLedStates(self):
    """Fetch every Led state object via the shared bulk-get helper."""
    return self.getObjects('Led', self.stateUrlBase)
def getOspfv2LsdbState(self, AreaId, LSId, AdvRouterId, LSType):
    """GET the Ospfv2Lsdb state object keyed by (AreaId, LSId, AdvRouterId, LSType)."""
    url = self.stateUrlBase + 'Ospfv2Lsdb'
    body = json.dumps({
        'AreaId': AreaId,
        'LSId': LSId,
        'AdvRouterId': AdvRouterId,
        'LSType': LSType,
    })
    kw = {'data': body, 'headers': headers, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.get(url, **kw)
def getOspfv2LsdbStateById(self, objectId):
    """GET a single Ospfv2Lsdb state object by its opaque object id."""
    url = self.stateUrlBase + 'Ospfv2Lsdb' + "/%s" % (objectId)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllOspfv2LsdbStates(self):
    """Fetch every Ospfv2Lsdb state object via the shared bulk-get helper."""
    return self.getObjects('Ospfv2Lsdb', self.stateUrlBase)
def getIPv4IntfState(self, IntfRef):
    """GET the IPv4Intf state object keyed by *IntfRef*."""
    url = self.stateUrlBase + 'IPv4Intf'
    body = json.dumps({'IntfRef': IntfRef})
    kw = {'data': body, 'headers': headers, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.get(url, **kw)
def getIPv4IntfStateById(self, objectId):
    """GET a single IPv4Intf state object by its opaque object id."""
    url = self.stateUrlBase + 'IPv4Intf' + "/%s" % (objectId)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllIPv4IntfStates(self):
    """Fetch every IPv4Intf state object via the shared bulk-get helper."""
    return self.getObjects('IPv4Intf', self.stateUrlBase)
def getPortState(self, IntfRef):
    """GET the Port state object keyed by *IntfRef*."""
    url = self.stateUrlBase + 'Port'
    body = json.dumps({'IntfRef': IntfRef})
    kw = {'data': body, 'headers': headers, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.get(url, **kw)
def getPortStateById(self, objectId):
    """GET a single Port state object by its opaque object id."""
    url = self.stateUrlBase + 'Port' + "/%s" % (objectId)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllPortStates(self):
    """Fetch every Port state object via the shared bulk-get helper."""
    return self.getObjects('Port', self.stateUrlBase)
"""
.. automethod :: createOspfIfMetricEntry(self,
:param int32 IfMetricAddressLessIf : For the purpose of easing the instancing of addressed and addressless interfaces; this variable takes the value 0 on interfaces with IP addresses and the value of ifIndex for interfaces having no IP address. On row creation For the purpose of easing the instancing of addressed and addressless interfaces; this variable takes the value 0 on interfaces with IP addresses and the value of ifIndex for interfaces having no IP address. On row creation
:param int32 IfMetricTOS : The Type of Service metric being referenced. On row creation The Type of Service metric being referenced. On row creation
:param string IfMetricIpAddress : The IP address of this OSPF interface. On row creation The IP address of this OSPF interface. On row creation
:param int32 IfMetricValue : The metric of using this Type of Service on this interface. The default value of the TOS 0 metric is 10^8 / ifSpeed. The metric of using this Type of Service on this interface. The default value of the TOS 0 metric is 10^8 / ifSpeed.
"""
def createOspfIfMetricEntry(self, IfMetricAddressLessIf, IfMetricTOS,
                            IfMetricIpAddress, IfMetricValue):
    """POST a new OspfIfMetricEntry config object."""
    body = json.dumps({
        'IfMetricAddressLessIf': int(IfMetricAddressLessIf),
        'IfMetricTOS': int(IfMetricTOS),
        'IfMetricIpAddress': IfMetricIpAddress,
        'IfMetricValue': int(IfMetricValue),
    })
    url = self.cfgUrlBase + 'OspfIfMetricEntry'
    kw = {'data': body, 'headers': headers, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.post(url, **kw)
def deleteOspfIfMetricEntry(self, IfMetricAddressLessIf, IfMetricTOS,
                            IfMetricIpAddress):
    """DELETE the OspfIfMetricEntry config object keyed by its three-part key."""
    body = json.dumps({
        'IfMetricAddressLessIf': IfMetricAddressLessIf,
        'IfMetricTOS': IfMetricTOS,
        'IfMetricIpAddress': IfMetricIpAddress,
    })
    url = self.cfgUrlBase + 'OspfIfMetricEntry'
    kw = {'data': body, 'headers': headers, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.delete(url, **kw)
def deleteOspfIfMetricEntryById(self, objectId):
    """DELETE the OspfIfMetricEntry identified by *objectId*.

    Fix: both branches were identical — credentials (``auth``/``verify``)
    were never sent even when ``self.authenticate`` was enabled, unlike
    every other authenticated request in this client.
    """
    reqUrl = self.cfgUrlBase + 'OspfIfMetricEntry' + "/%s" % (objectId)
    if self.authenticate == True:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updateOspfIfMetricEntry(self, IfMetricAddressLessIf, IfMetricTOS,
                            IfMetricIpAddress, IfMetricValue=None):
    """PATCH an OspfIfMetricEntry; only non-None fields are sent."""
    fields = {}
    if IfMetricAddressLessIf != None:
        fields['IfMetricAddressLessIf'] = int(IfMetricAddressLessIf)
    if IfMetricTOS != None:
        fields['IfMetricTOS'] = int(IfMetricTOS)
    if IfMetricIpAddress != None:
        fields['IfMetricIpAddress'] = IfMetricIpAddress
    if IfMetricValue != None:
        fields['IfMetricValue'] = int(IfMetricValue)
    url = self.cfgUrlBase + 'OspfIfMetricEntry'
    kw = {'data': json.dumps(fields), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.patch(url, **kw)
def updateOspfIfMetricEntryById(self, objectId, IfMetricValue=None):
    """PATCH the OspfIfMetricEntry identified by *objectId* (non-None fields only)."""
    fields = {}
    if IfMetricValue != None:
        fields['IfMetricValue'] = IfMetricValue
    url = self.cfgUrlBase + 'OspfIfMetricEntry' + "/%s" % (objectId)
    kw = {'data': json.dumps(fields), 'headers': headers, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.patch(url, **kw)
def patchUpdateOspfIfMetricEntry(self, IfMetricAddressLessIf, IfMetricTOS,
                                 IfMetricIpAddress, op, path, value):
    """Apply a JSON-patch style (op/path/value) update to the keyed OspfIfMetricEntry."""
    body = json.dumps({
        'IfMetricAddressLessIf': IfMetricAddressLessIf,
        'IfMetricTOS': IfMetricTOS,
        'IfMetricIpAddress': IfMetricIpAddress,
        'patch': [{'op': op, 'path': path, 'value': value}],
    })
    url = self.cfgUrlBase + 'OspfIfMetricEntry'
    kw = {'data': body, 'headers': patchheaders, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.patch(url, **kw)
def getOspfIfMetricEntry(self, IfMetricAddressLessIf, IfMetricTOS,
                         IfMetricIpAddress):
    """GET the OspfIfMetricEntry config object keyed by its three-part key."""
    body = json.dumps({
        'IfMetricAddressLessIf': int(IfMetricAddressLessIf),
        'IfMetricTOS': int(IfMetricTOS),
        'IfMetricIpAddress': IfMetricIpAddress,
    })
    url = self.cfgUrlBase + 'OspfIfMetricEntry'
    kw = {'data': body, 'headers': headers, 'timeout': self.timeout}
    if self.authenticate == True:
        kw['auth'] = (self.user, self.passwd)
        kw['verify'] = False
    return requests.get(url, **kw)
def getOspfIfMetricEntryById(self, objectId):
    """GET a single OspfIfMetricEntry config object by its opaque object id."""
    url = self.cfgUrlBase + 'OspfIfMetricEntry' + "/%s" % (objectId)
    if self.authenticate == True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout,
                            auth=(self.user, self.passwd), verify=False)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout)
def getAllOspfIfMetricEntrys(self):
    """Fetch every OspfIfMetricEntry config object via the shared bulk-get helper."""
    return self.getObjects('OspfIfMetricEntry', self.cfgUrlBase)
"""
.. automethod :: executeAsicdClearCounters(self,
:param string IntfRef : Clear counters on given interface Clear counters on given interface
:param string Type : Clear counter for specific type like port Clear counter for specific type like port
"""
def executeAsicdClearCounters(self, IntfRef='All', Type='Port'):
    """POST the AsicdClearCounters action (no timeout, by design)."""
    url = self.actionUrlBase + 'AsicdClearCounters'
    body = json.dumps({'IntfRef': IntfRef, 'Type': Type})
    if self.authenticate == True:
        return requests.post(url, data=body, headers=headers,
                             auth=(self.user, self.passwd), verify=False)
    return requests.post(url, data=body, headers=headers)
"""
.. automethod :: createSfp(self,
:param int32 SfpId : SFP id SFP id
:param string AdminState : Admin PORT UP/DOWN(TX OFF) Admin PORT UP/DOWN(TX OFF)
"""
def createSfp(self, AdminState, SfpId=0):
    """POST a new Sfp config object.

    Generalization: the generated doc above declares an SfpId parameter,
    but the method hard-coded ``'SfpId': int(0)``. SfpId is now an optional
    trailing parameter defaulting to 0, so existing callers are unaffected
    while other SFP ids become reachable.
    """
    obj = {
        'SfpId': int(SfpId),
        'AdminState': AdminState,
    }
    reqUrl = self.cfgUrlBase + 'Sfp'
    if self.authenticate == True:
        r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout,
                          auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
    return r
def deleteSfp(self,
              SfpId):
    """DELETE the Sfp config object keyed by SfpId (sent unconverted)."""
    payload = json.dumps({'SfpId': SfpId})
    url = self.cfgUrlBase + 'Sfp'
    kwargs = dict(data=payload, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.delete(url, **kwargs)
def deleteSfpById(self, objectId):
    """DELETE the Sfp object addressed by objectId.

    Fix: the authenticated branch now actually sends credentials
    (auth=/verify=False) like every other authenticated request in this
    client; previously both branches issued the same unauthenticated call.
    """
    reqUrl = self.cfgUrlBase + 'Sfp' + "/%s" % (objectId)
    if self.authenticate == True:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updateSfp(self,
              SfpId,
              AdminState=None):
    """PATCH the Sfp object; only non-None fields are included (SfpId int-cast)."""
    obj = {}
    if SfpId is not None:
        obj['SfpId'] = int(SfpId)
    if AdminState is not None:
        obj['AdminState'] = AdminState
    url = self.cfgUrlBase + 'Sfp'
    kwargs = dict(data=json.dumps(obj), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, **kwargs)
def updateSfpById(self,
                  objectId,
                  AdminState=None):
    """PATCH Sfp/<objectId>; non-None fields are sent as-is (no casting)."""
    obj = {}
    if AdminState is not None:
        obj['AdminState'] = AdminState
    url = self.cfgUrlBase + 'Sfp' + "/%s" % (objectId)
    kwargs = dict(data=json.dumps(obj), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, **kwargs)
def patchUpdateSfp(self,
                   SfpId,
                   op,
                   path,
                   value,):
    """Send a JSON-patch style PATCH for Sfp (uses patchheaders)."""
    body = {
        'SfpId': SfpId,
        'patch': [{'op': op, 'path': path, 'value': value}],
    }
    url = self.cfgUrlBase + 'Sfp'
    kwargs = dict(data=json.dumps(body), headers=patchheaders, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, **kwargs)
def getSfp(self,
           SfpId):
    """GET the Sfp config object keyed by SfpId (int-cast into the request body)."""
    payload = json.dumps({'SfpId': int(SfpId)})
    url = self.cfgUrlBase + 'Sfp'
    kwargs = dict(data=payload, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getSfpById(self, objectId):
    """GET a single Sfp config object addressed by its object id."""
    url = self.cfgUrlBase + 'Sfp' + "/%s" % (objectId)
    kwargs = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getAllSfps(self):
    """Return every Sfp config object via the common getObjects helper."""
    return self.getObjects('Sfp', self.cfgUrlBase)
def getDHCPRelayClientState(self,
                            MacAddr):
    """GET the DHCPRelayClient state object keyed by MacAddr."""
    payload = json.dumps({'MacAddr': MacAddr})
    url = self.stateUrlBase + 'DHCPRelayClient'
    kwargs = dict(data=payload, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getDHCPRelayClientStateById(self, objectId):
    """GET a single DHCPRelayClient state object addressed by its object id."""
    url = self.stateUrlBase + 'DHCPRelayClient' + "/%s" % (objectId)
    kwargs = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getAllDHCPRelayClientStates(self):
    """Return every DHCPRelayClient state object via the common getObjects helper."""
    return self.getObjects('DHCPRelayClient', self.stateUrlBase)
def getSystemSwVersionState(self,
                            FlexswitchVersion):
    """GET the SystemSwVersion state object keyed by FlexswitchVersion."""
    payload = json.dumps({'FlexswitchVersion': FlexswitchVersion})
    url = self.stateUrlBase + 'SystemSwVersion'
    kwargs = dict(data=payload, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getSystemSwVersionStateById(self, objectId):
    """GET a single SystemSwVersion state object addressed by its object id."""
    url = self.stateUrlBase + 'SystemSwVersion' + "/%s" % (objectId)
    kwargs = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getAllSystemSwVersionStates(self):
    """Return every SystemSwVersion state object via the common getObjects helper."""
    return self.getObjects('SystemSwVersion', self.stateUrlBase)
"""
.. automethod :: executeNdpRefreshByIPv6Addr(self,
:param string IpAddr : Neighbor's IPV6 Address for which corresponding NDP entry needs to be re-learned Neighbor's IPV6 Address for which corresponding NDP entry needs to be re-learned
"""
def executeNdpRefreshByIPv6Addr(self,
                                IpAddr):
    """POST the NdpRefreshByIPv6Addr action (note: action calls carry no timeout)."""
    payload = json.dumps({'IpAddr': IpAddr})
    url = self.actionUrlBase + 'NdpRefreshByIPv6Addr'
    if self.authenticate == True:
        return requests.post(url, data=payload, headers=headers, auth=(self.user, self.passwd), verify=False)
    return requests.post(url, data=payload, headers=headers)
def getDaemonState(self,
                   Name):
    """GET the Daemon state object keyed by Name."""
    payload = json.dumps({'Name': Name})
    url = self.stateUrlBase + 'Daemon'
    kwargs = dict(data=payload, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getDaemonStateById(self, objectId):
    """GET a single Daemon state object addressed by its object id."""
    url = self.stateUrlBase + 'Daemon' + "/%s" % (objectId)
    kwargs = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getAllDaemonStates(self):
    """Return every Daemon state object via the common getObjects helper."""
    return self.getObjects('Daemon', self.stateUrlBase)
def getSystemParamState(self,
                        Vrf):
    """GET the SystemParam state object keyed by Vrf."""
    payload = json.dumps({'Vrf': Vrf})
    url = self.stateUrlBase + 'SystemParam'
    kwargs = dict(data=payload, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getSystemParamStateById(self, objectId):
    """GET a single SystemParam state object addressed by its object id."""
    url = self.stateUrlBase + 'SystemParam' + "/%s" % (objectId)
    kwargs = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getAllSystemParamStates(self):
    """Return every SystemParam state object via the common getObjects helper."""
    return self.getObjects('SystemParam', self.stateUrlBase)
"""
.. automethod :: executeResetBfdSession(self,
:param string IpAddr : Reset BFD session to this address Reset BFD session to this address
"""
def executeResetBfdSession(self,
                           IpAddr):
    """POST the ResetBfdSession action (note: action calls carry no timeout)."""
    payload = json.dumps({'IpAddr': IpAddr})
    url = self.actionUrlBase + 'ResetBfdSession'
    if self.authenticate == True:
        return requests.post(url, data=payload, headers=headers, auth=(self.user, self.passwd), verify=False)
    return requests.post(url, data=payload, headers=headers)
def getBufferPortStatState(self,
                           IntfRef):
    """GET the BufferPortStat state object keyed by IntfRef."""
    payload = json.dumps({'IntfRef': IntfRef})
    url = self.stateUrlBase + 'BufferPortStat'
    kwargs = dict(data=payload, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getBufferPortStatStateById(self, objectId):
    """GET a single BufferPortStat state object addressed by its object id."""
    url = self.stateUrlBase + 'BufferPortStat' + "/%s" % (objectId)
    kwargs = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getAllBufferPortStatStates(self):
    """Return every BufferPortStat state object via the common getObjects helper."""
    return self.getObjects('BufferPortStat', self.stateUrlBase)
def getQsfpChannelState(self,
                        ChannelNum,
                        QsfpId):
    """GET the QsfpChannel state object keyed by ChannelNum and QsfpId (both int-cast)."""
    payload = json.dumps({
        'ChannelNum': int(ChannelNum),
        'QsfpId': int(QsfpId),
    })
    url = self.stateUrlBase + 'QsfpChannel'
    kwargs = dict(data=payload, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getQsfpChannelStateById(self, objectId):
    """GET a single QsfpChannel state object addressed by its object id."""
    url = self.stateUrlBase + 'QsfpChannel' + "/%s" % (objectId)
    kwargs = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getAllQsfpChannelStates(self):
    """Return every QsfpChannel state object via the common getObjects helper."""
    return self.getObjects('QsfpChannel', self.stateUrlBase)
def getVoltageSensorState(self,
                          Name):
    """GET the VoltageSensor state object keyed by Name."""
    payload = json.dumps({'Name': Name})
    url = self.stateUrlBase + 'VoltageSensor'
    kwargs = dict(data=payload, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getVoltageSensorStateById(self, objectId):
    """GET a single VoltageSensor state object addressed by its object id."""
    url = self.stateUrlBase + 'VoltageSensor' + "/%s" % (objectId)
    kwargs = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getAllVoltageSensorStates(self):
    """Return every VoltageSensor state object via the common getObjects helper."""
    return self.getObjects('VoltageSensor', self.stateUrlBase)
def getDWDMModuleNwIntfPMState(self,
                               Resource,
                               NwIntfId,
                               Type,
                               Class,
                               ModuleId):
    """GET the DWDMModuleNwIntfPM state object; NwIntfId and ModuleId are int-cast."""
    payload = json.dumps({
        'Resource': Resource,
        'NwIntfId': int(NwIntfId),
        'Type': Type,
        'Class': Class,
        'ModuleId': int(ModuleId),
    })
    url = self.stateUrlBase + 'DWDMModuleNwIntfPM'
    kwargs = dict(data=payload, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getDWDMModuleNwIntfPMStateById(self, objectId):
    """GET a single DWDMModuleNwIntfPM state object addressed by its object id."""
    url = self.stateUrlBase + 'DWDMModuleNwIntfPM' + "/%s" % (objectId)
    kwargs = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getAllDWDMModuleNwIntfPMStates(self):
    """Return every DWDMModuleNwIntfPM state object via the common getObjects helper."""
    return self.getObjects('DWDMModuleNwIntfPM', self.stateUrlBase)
"""
.. automethod :: createDWDMModule(self,
:param uint8 ModuleId : DWDM Module identifier DWDM Module identifier
:param bool EnableExtPMTickSrc : Enable/Disable external tick source for performance monitoring Enable/Disable external tick source for performance monitoring
:param uint8 PMInterval : Performance monitoring interval Performance monitoring interval
:param string AdminState : Reset state of this dwdm module (false (Reset deasserted) Reset state of this dwdm module (false (Reset deasserted)
:param bool IndependentLaneMode : Network lane configuration for the DWDM Module. true-Independent lanes Network lane configuration for the DWDM Module. true-Independent lanes
"""
def createDWDMModule(self,
                     ModuleId,
                     EnableExtPMTickSrc=False,
                     PMInterval=1,
                     AdminState='DOWN',
                     IndependentLaneMode=True):
    """POST a new DWDMModule config object; ints are int-cast, flags coerced to bool."""
    body = {
        'ModuleId': int(ModuleId),
        'EnableExtPMTickSrc': bool(EnableExtPMTickSrc),
        'PMInterval': int(PMInterval),
        'AdminState': AdminState,
        'IndependentLaneMode': bool(IndependentLaneMode),
    }
    url = self.cfgUrlBase + 'DWDMModule'
    kwargs = dict(data=json.dumps(body), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.post(url, **kwargs)
def deleteDWDMModule(self,
                     ModuleId):
    """DELETE the DWDMModule config object keyed by ModuleId (sent unconverted)."""
    payload = json.dumps({'ModuleId': ModuleId})
    url = self.cfgUrlBase + 'DWDMModule'
    kwargs = dict(data=payload, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.delete(url, **kwargs)
def deleteDWDMModuleById(self, objectId):
    """DELETE the DWDMModule object addressed by objectId.

    Fix: the authenticated branch now actually sends credentials
    (auth=/verify=False) like every other authenticated request in this
    client; previously both branches issued the same unauthenticated call.
    """
    reqUrl = self.cfgUrlBase + 'DWDMModule' + "/%s" % (objectId)
    if self.authenticate == True:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updateDWDMModule(self,
                     ModuleId,
                     EnableExtPMTickSrc=None,
                     PMInterval=None,
                     AdminState=None,
                     IndependentLaneMode=None):
    """PATCH DWDMModule; only non-None arguments are included, cast per field type."""
    spec = (
        ('ModuleId', ModuleId, int),
        ('EnableExtPMTickSrc', EnableExtPMTickSrc, bool),
        ('PMInterval', PMInterval, int),
        ('AdminState', AdminState, None),
        ('IndependentLaneMode', IndependentLaneMode, bool),
    )
    obj = {}
    for key, val, conv in spec:
        if val is not None:
            obj[key] = conv(val) if conv else val
    url = self.cfgUrlBase + 'DWDMModule'
    kwargs = dict(data=json.dumps(obj), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, **kwargs)
def updateDWDMModuleById(self,
                         objectId,
                         EnableExtPMTickSrc=None,
                         PMInterval=None,
                         AdminState=None,
                         IndependentLaneMode=None):
    """PATCH DWDMModule/<objectId>; non-None fields are sent as-is (no casting)."""
    fields = (
        ('EnableExtPMTickSrc', EnableExtPMTickSrc),
        ('PMInterval', PMInterval),
        ('AdminState', AdminState),
        ('IndependentLaneMode', IndependentLaneMode),
    )
    obj = {k: v for k, v in fields if v is not None}
    url = self.cfgUrlBase + 'DWDMModule' + "/%s" % (objectId)
    kwargs = dict(data=json.dumps(obj), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, **kwargs)
def patchUpdateDWDMModule(self,
                          ModuleId,
                          op,
                          path,
                          value,):
    """Send a JSON-patch style PATCH for DWDMModule (uses patchheaders)."""
    body = {
        'ModuleId': ModuleId,
        'patch': [{'op': op, 'path': path, 'value': value}],
    }
    url = self.cfgUrlBase + 'DWDMModule'
    kwargs = dict(data=json.dumps(body), headers=patchheaders, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, **kwargs)
def getDWDMModule(self,
                  ModuleId):
    """GET the DWDMModule config object keyed by ModuleId (int-cast)."""
    payload = json.dumps({'ModuleId': int(ModuleId)})
    url = self.cfgUrlBase + 'DWDMModule'
    kwargs = dict(data=payload, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getDWDMModuleById(self, objectId):
    """GET a single DWDMModule config object addressed by its object id."""
    url = self.cfgUrlBase + 'DWDMModule' + "/%s" % (objectId)
    kwargs = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getAllDWDMModules(self):
    """Return every DWDMModule config object via the common getObjects helper."""
    return self.getObjects('DWDMModule', self.cfgUrlBase)
"""
.. automethod :: executeApplyConfig(self,
"""
def executeApplyConfig(self):
    """POST the ApplyConfig action with an empty JSON body (no timeout on actions)."""
    payload = json.dumps({})
    url = self.actionUrlBase + 'ApplyConfig'
    if self.authenticate == True:
        return requests.post(url, data=payload, headers=headers, auth=(self.user, self.passwd), verify=False)
    return requests.post(url, data=payload, headers=headers)
"""
.. automethod :: createAcl(self,
:param string AclName : Acl rule name. Rule Name should match with GroupName from AclGroup. Acl rule name. Rule Name should match with GroupName from AclGroup.
:param string IntfList : list of IntfRef can be port/lag object list of IntfRef can be port/lag object
:param string AclType : Acl type IPv4/MAC/Ipv6 Acl type IPv4/MAC/Ipv6
:param string FilterName : Filter name for acl . Filter name for acl .
:param int32 Priority : Acl priority. Acls with higher priority will have precedence over with lower. Acl priority. Acls with higher priority will have precedence over with lower.
:param string Action : Type of action (ALLOW/DENY) Type of action (ALLOW/DENY)
:param string Stage : Ingress or Egress where ACL to be applied Ingress or Egress where ACL to be applied
"""
def createAcl(self,
              AclName,
              IntfList,
              AclType='IPv4',
              FilterName='',
              Priority=1,
              Action='ALLOW',
              Stage='IN'):
    """POST a new Acl config object; Priority is int-cast, other fields sent verbatim."""
    body = {
        'AclName': AclName,
        'IntfList': IntfList,
        'AclType': AclType,
        'FilterName': FilterName,
        'Priority': int(Priority),
        'Action': Action,
        'Stage': Stage,
    }
    url = self.cfgUrlBase + 'Acl'
    kwargs = dict(data=json.dumps(body), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.post(url, **kwargs)
def deleteAcl(self,
              AclName):
    """DELETE the Acl config object keyed by AclName."""
    payload = json.dumps({'AclName': AclName})
    url = self.cfgUrlBase + 'Acl'
    kwargs = dict(data=payload, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.delete(url, **kwargs)
def deleteAclById(self, objectId):
    """DELETE the Acl object addressed by objectId.

    Fix: the authenticated branch now actually sends credentials
    (auth=/verify=False) like every other authenticated request in this
    client; previously both branches issued the same unauthenticated call.
    """
    reqUrl = self.cfgUrlBase + 'Acl' + "/%s" % (objectId)
    if self.authenticate == True:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updateAcl(self,
              AclName,
              IntfList=None,
              AclType=None,
              FilterName=None,
              Priority=None,
              Action=None,
              Stage=None):
    """PATCH Acl; only non-None arguments are included (Priority int-cast)."""
    spec = (
        ('AclName', AclName, None),
        ('IntfList', IntfList, None),
        ('AclType', AclType, None),
        ('FilterName', FilterName, None),
        ('Priority', Priority, int),
        ('Action', Action, None),
        ('Stage', Stage, None),
    )
    obj = {}
    for key, val, conv in spec:
        if val is not None:
            obj[key] = conv(val) if conv else val
    url = self.cfgUrlBase + 'Acl'
    kwargs = dict(data=json.dumps(obj), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, **kwargs)
def updateAclById(self,
                  objectId,
                  IntfList=None,
                  AclType=None,
                  FilterName=None,
                  Priority=None,
                  Action=None,
                  Stage=None):
    """PATCH Acl/<objectId>; non-None fields are sent as-is (no casting)."""
    fields = (
        ('IntfList', IntfList),
        ('AclType', AclType),
        ('FilterName', FilterName),
        ('Priority', Priority),
        ('Action', Action),
        ('Stage', Stage),
    )
    obj = {k: v for k, v in fields if v is not None}
    url = self.cfgUrlBase + 'Acl' + "/%s" % (objectId)
    kwargs = dict(data=json.dumps(obj), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, **kwargs)
def patchUpdateAcl(self,
                   AclName,
                   op,
                   path,
                   value,):
    """Send a JSON-patch style PATCH for Acl (uses patchheaders)."""
    body = {
        'AclName': AclName,
        'patch': [{'op': op, 'path': path, 'value': value}],
    }
    url = self.cfgUrlBase + 'Acl'
    kwargs = dict(data=json.dumps(body), headers=patchheaders, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, **kwargs)
def getAcl(self,
           AclName):
    """GET the Acl config object keyed by AclName."""
    payload = json.dumps({'AclName': AclName})
    url = self.cfgUrlBase + 'Acl'
    kwargs = dict(data=payload, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getAclById(self, objectId):
    """GET a single Acl config object addressed by its object id."""
    url = self.cfgUrlBase + 'Acl' + "/%s" % (objectId)
    kwargs = dict(data=None, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.get(url, **kwargs)
def getAllAcls(self):
    """Return every Acl config object via the common getObjects helper."""
    return self.getObjects('Acl', self.cfgUrlBase)
"""
.. automethod :: createBGPv6Neighbor(self,
:param string IntfRef : Interface of the BGP neighbor Interface of the BGP neighbor
:param string NeighborAddress : Address of the BGP neighbor Address of the BGP neighbor
:param bool BfdEnable : Enable/Disable BFD for the BGP neighbor Enable/Disable BFD for the BGP neighbor
:param string PeerGroup : Peer group of the BGP neighbor Peer group of the BGP neighbor
:param uint8 MultiHopTTL : TTL for multi hop BGP neighbor TTL for multi hop BGP neighbor
:param string LocalAS : Local AS of the BGP neighbor Local AS of the BGP neighbor
:param uint32 KeepaliveTime : Keep alive time for the BGP neighbor Keep alive time for the BGP neighbor
:param bool AddPathsRx : Receive additional paths from BGP neighbor Receive additional paths from BGP neighbor
:param string UpdateSource : Source IP to connect to the BGP neighbor Source IP to connect to the BGP neighbor
:param bool RouteReflectorClient : Set/Clear BGP neighbor as a route reflector client Set/Clear BGP neighbor as a route reflector client
:param uint8 MaxPrefixesRestartTimer : Time in seconds to wait before we start BGP peer session when we receive max prefixes Time in seconds to wait before we start BGP peer session when we receive max prefixes
:param string Description : Description of the BGP neighbor Description of the BGP neighbor
:param bool MultiHopEnable : Enable/Disable multi hop for BGP neighbor Enable/Disable multi hop for BGP neighbor
:param uint32 RouteReflectorClusterId : Cluster Id of the internal BGP neighbor route reflector client Cluster Id of the internal BGP neighbor route reflector client
:param string AdjRIBOutFilter : Policy that is applied for Adj-RIB-Out prefix filtering Policy that is applied for Adj-RIB-Out prefix filtering
:param bool MaxPrefixesDisconnect : Disconnect the BGP peer session when we receive the max prefixes from the neighbor Disconnect the BGP peer session when we receive the max prefixes from the neighbor
:param string PeerAS : Peer AS of the BGP neighbor Peer AS of the BGP neighbor
:param uint8 AddPathsMaxTx : Max number of additional paths that can be transmitted to BGP neighbor Max number of additional paths that can be transmitted to BGP neighbor
:param string AdjRIBInFilter : Policy that is applied for Adj-RIB-In prefix filtering Policy that is applied for Adj-RIB-In prefix filtering
:param uint32 MaxPrefixes : Maximum number of prefixes that can be received from the BGP neighbor Maximum number of prefixes that can be received from the BGP neighbor
:param uint8 MaxPrefixesThresholdPct : The percentage of maximum prefixes before we start logging The percentage of maximum prefixes before we start logging
:param string BfdSessionParam : Bfd session param name to be applied Bfd session param name to be applied
:param bool NextHopSelf : Use neighbor source IP as the next hop for IBGP neighbors Use neighbor source IP as the next hop for IBGP neighbors
:param bool Disabled : Enable/Disable the BGP neighbor Enable/Disable the BGP neighbor
:param uint32 HoldTime : Hold time for the BGP neighbor Hold time for the BGP neighbor
:param uint32 ConnectRetryTime : Connect retry time to connect to BGP neighbor after disconnect Connect retry time to connect to BGP neighbor after disconnect
"""
def createBGPv6Neighbor(self,
                        IntfRef,
                        NeighborAddress,
                        BfdEnable=False,
                        PeerGroup='',
                        MultiHopTTL=0,
                        LocalAS='',
                        KeepaliveTime=0,
                        AddPathsRx=False,
                        UpdateSource='',
                        RouteReflectorClient=False,
                        MaxPrefixesRestartTimer=0,
                        Description='',
                        MultiHopEnable=False,
                        RouteReflectorClusterId=0,
                        AdjRIBOutFilter='',
                        MaxPrefixesDisconnect=False,
                        PeerAS='',
                        AddPathsMaxTx=0,
                        AdjRIBInFilter='',
                        MaxPrefixes=0,
                        MaxPrefixesThresholdPct=80,
                        BfdSessionParam='default',
                        NextHopSelf=False,
                        Disabled=False,
                        HoldTime=0,
                        ConnectRetryTime=0):
    """POST a new BGPv6Neighbor config object.

    String fields are sent verbatim, numeric fields int-cast, and flags
    coerced to bool before serialization, matching the REST schema.
    """
    # (field name, value, converter-or-None) in schema order.
    spec = (
        ('IntfRef', IntfRef, None),
        ('NeighborAddress', NeighborAddress, None),
        ('BfdEnable', BfdEnable, bool),
        ('PeerGroup', PeerGroup, None),
        ('MultiHopTTL', MultiHopTTL, int),
        ('LocalAS', LocalAS, None),
        ('KeepaliveTime', KeepaliveTime, int),
        ('AddPathsRx', AddPathsRx, bool),
        ('UpdateSource', UpdateSource, None),
        ('RouteReflectorClient', RouteReflectorClient, bool),
        ('MaxPrefixesRestartTimer', MaxPrefixesRestartTimer, int),
        ('Description', Description, None),
        ('MultiHopEnable', MultiHopEnable, bool),
        ('RouteReflectorClusterId', RouteReflectorClusterId, int),
        ('AdjRIBOutFilter', AdjRIBOutFilter, None),
        ('MaxPrefixesDisconnect', MaxPrefixesDisconnect, bool),
        ('PeerAS', PeerAS, None),
        ('AddPathsMaxTx', AddPathsMaxTx, int),
        ('AdjRIBInFilter', AdjRIBInFilter, None),
        ('MaxPrefixes', MaxPrefixes, int),
        ('MaxPrefixesThresholdPct', MaxPrefixesThresholdPct, int),
        ('BfdSessionParam', BfdSessionParam, None),
        ('NextHopSelf', NextHopSelf, bool),
        ('Disabled', Disabled, bool),
        ('HoldTime', HoldTime, int),
        ('ConnectRetryTime', ConnectRetryTime, int),
    )
    body = {key: (conv(val) if conv else val) for key, val, conv in spec}
    url = self.cfgUrlBase + 'BGPv6Neighbor'
    kwargs = dict(data=json.dumps(body), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.post(url, **kwargs)
def deleteBGPv6Neighbor(self,
                        IntfRef,
                        NeighborAddress):
    """DELETE the BGPv6Neighbor config object keyed by IntfRef and NeighborAddress."""
    payload = json.dumps({
        'IntfRef': IntfRef,
        'NeighborAddress': NeighborAddress,
    })
    url = self.cfgUrlBase + 'BGPv6Neighbor'
    kwargs = dict(data=payload, headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.delete(url, **kwargs)
def deleteBGPv6NeighborById(self, objectId):
    """DELETE the BGPv6Neighbor object addressed by objectId.

    Fix: the authenticated branch now actually sends credentials
    (auth=/verify=False) like every other authenticated request in this
    client; previously both branches issued the same unauthenticated call.
    """
    reqUrl = self.cfgUrlBase + 'BGPv6Neighbor' + "/%s" % (objectId)
    if self.authenticate == True:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updateBGPv6Neighbor(self,
                        IntfRef,
                        NeighborAddress,
                        BfdEnable=None,
                        PeerGroup=None,
                        MultiHopTTL=None,
                        LocalAS=None,
                        KeepaliveTime=None,
                        AddPathsRx=None,
                        UpdateSource=None,
                        RouteReflectorClient=None,
                        MaxPrefixesRestartTimer=None,
                        Description=None,
                        MultiHopEnable=None,
                        RouteReflectorClusterId=None,
                        AdjRIBOutFilter=None,
                        MaxPrefixesDisconnect=None,
                        PeerAS=None,
                        AddPathsMaxTx=None,
                        AdjRIBInFilter=None,
                        MaxPrefixes=None,
                        MaxPrefixesThresholdPct=None,
                        BfdSessionParam=None,
                        NextHopSelf=None,
                        Disabled=None,
                        HoldTime=None,
                        ConnectRetryTime=None):
    """PATCH BGPv6Neighbor; only non-None arguments are included, cast per field type."""
    # (field name, value, converter-or-None) in schema order.
    spec = (
        ('IntfRef', IntfRef, None),
        ('NeighborAddress', NeighborAddress, None),
        ('BfdEnable', BfdEnable, bool),
        ('PeerGroup', PeerGroup, None),
        ('MultiHopTTL', MultiHopTTL, int),
        ('LocalAS', LocalAS, None),
        ('KeepaliveTime', KeepaliveTime, int),
        ('AddPathsRx', AddPathsRx, bool),
        ('UpdateSource', UpdateSource, None),
        ('RouteReflectorClient', RouteReflectorClient, bool),
        ('MaxPrefixesRestartTimer', MaxPrefixesRestartTimer, int),
        ('Description', Description, None),
        ('MultiHopEnable', MultiHopEnable, bool),
        ('RouteReflectorClusterId', RouteReflectorClusterId, int),
        ('AdjRIBOutFilter', AdjRIBOutFilter, None),
        ('MaxPrefixesDisconnect', MaxPrefixesDisconnect, bool),
        ('PeerAS', PeerAS, None),
        ('AddPathsMaxTx', AddPathsMaxTx, int),
        ('AdjRIBInFilter', AdjRIBInFilter, None),
        ('MaxPrefixes', MaxPrefixes, int),
        ('MaxPrefixesThresholdPct', MaxPrefixesThresholdPct, int),
        ('BfdSessionParam', BfdSessionParam, None),
        ('NextHopSelf', NextHopSelf, bool),
        ('Disabled', Disabled, bool),
        ('HoldTime', HoldTime, int),
        ('ConnectRetryTime', ConnectRetryTime, int),
    )
    obj = {}
    for key, val, conv in spec:
        if val is not None:
            obj[key] = conv(val) if conv else val
    url = self.cfgUrlBase + 'BGPv6Neighbor'
    kwargs = dict(data=json.dumps(obj), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, **kwargs)
def updateBGPv6NeighborById(self,
                            objectId,
                            BfdEnable=None,
                            PeerGroup=None,
                            MultiHopTTL=None,
                            LocalAS=None,
                            KeepaliveTime=None,
                            AddPathsRx=None,
                            UpdateSource=None,
                            RouteReflectorClient=None,
                            MaxPrefixesRestartTimer=None,
                            Description=None,
                            MultiHopEnable=None,
                            RouteReflectorClusterId=None,
                            AdjRIBOutFilter=None,
                            MaxPrefixesDisconnect=None,
                            PeerAS=None,
                            AddPathsMaxTx=None,
                            AdjRIBInFilter=None,
                            MaxPrefixes=None,
                            MaxPrefixesThresholdPct=None,
                            BfdSessionParam=None,
                            NextHopSelf=None,
                            Disabled=None,
                            HoldTime=None,
                            ConnectRetryTime=None):
    """PATCH BGPv6Neighbor/<objectId>; non-None fields are sent as-is (no casting)."""
    fields = (
        ('BfdEnable', BfdEnable),
        ('PeerGroup', PeerGroup),
        ('MultiHopTTL', MultiHopTTL),
        ('LocalAS', LocalAS),
        ('KeepaliveTime', KeepaliveTime),
        ('AddPathsRx', AddPathsRx),
        ('UpdateSource', UpdateSource),
        ('RouteReflectorClient', RouteReflectorClient),
        ('MaxPrefixesRestartTimer', MaxPrefixesRestartTimer),
        ('Description', Description),
        ('MultiHopEnable', MultiHopEnable),
        ('RouteReflectorClusterId', RouteReflectorClusterId),
        ('AdjRIBOutFilter', AdjRIBOutFilter),
        ('MaxPrefixesDisconnect', MaxPrefixesDisconnect),
        ('PeerAS', PeerAS),
        ('AddPathsMaxTx', AddPathsMaxTx),
        ('AdjRIBInFilter', AdjRIBInFilter),
        ('MaxPrefixes', MaxPrefixes),
        ('MaxPrefixesThresholdPct', MaxPrefixesThresholdPct),
        ('BfdSessionParam', BfdSessionParam),
        ('NextHopSelf', NextHopSelf),
        ('Disabled', Disabled),
        ('HoldTime', HoldTime),
        ('ConnectRetryTime', ConnectRetryTime),
    )
    obj = {k: v for k, v in fields if v is not None}
    url = self.cfgUrlBase + 'BGPv6Neighbor' + "/%s" % (objectId)
    kwargs = dict(data=json.dumps(obj), headers=headers, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, **kwargs)
def patchUpdateBGPv6Neighbor(self,
                             IntfRef,
                             NeighborAddress,
                             op,
                             path,
                             value,):
    """Send a JSON-patch style PATCH for BGPv6Neighbor (uses patchheaders)."""
    body = {
        'IntfRef': IntfRef,
        'NeighborAddress': NeighborAddress,
        'patch': [{'op': op, 'path': path, 'value': value}],
    }
    url = self.cfgUrlBase + 'BGPv6Neighbor'
    kwargs = dict(data=json.dumps(body), headers=patchheaders, timeout=self.timeout)
    if self.authenticate == True:
        kwargs.update(auth=(self.user, self.passwd), verify=False)
    return requests.patch(url, **kwargs)
def getBGPv6Neighbor(self,
IntfRef,
NeighborAddress):
obj = {
'IntfRef' : IntfRef,
'NeighborAddress' : NeighborAddress,
}
reqUrl = self.cfgUrlBase + 'BGPv6Neighbor'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getBGPv6NeighborById(self, objectId ):
reqUrl = self.cfgUrlBase + 'BGPv6Neighbor'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllBGPv6Neighbors(self):
return self.getObjects('BGPv6Neighbor', self.cfgUrlBase)
"""
.. automethod :: createStpPort(self,
:param int32 Vlan : The value of instance of the vlan object The value of instance of the vlan object
:param string IntfRef : The port number of the port for which this entry contains Spanning Tree Protocol management information. The port number of the port for which this entry contains Spanning Tree Protocol management information.
:param int32 PathCost : The contribution of this port to the path cost of paths towards the spanning tree root which include this port. 802.1D-1998 recommends that the default value of this parameter be in inverse proportion to the speed of the attached LAN. New implementations should support PathCost32. If the port path costs exceeds the maximum value of this object then this object should report the maximum value; namely 65535. Applications should try to read the PathCost32 object if this object reports the maximum value. Value of 1 will force node to auto discover the value based on the ports capabilities. The contribution of this port to the path cost of paths towards the spanning tree root which include this port. 802.1D-1998 recommends that the default value of this parameter be in inverse proportion to the speed of the attached LAN. New implementations should support PathCost32. If the port path costs exceeds the maximum value of this object then this object should report the maximum value; namely 65535. Applications should try to read the PathCost32 object if this object reports the maximum value. Value of 1 will force node to auto discover the value based on the ports capabilities.
:param int32 AdminEdgePort : The administrative value of the Edge Port parameter. A value of true(1) indicates that this port should be assumed as an edge-port and a value of false(2) indicates that this port should be assumed as a non-edge-port. Setting this object will also cause the corresponding instance of OperEdgePort to change to the same value. Note that even when this object's value is true the value of the corresponding instance of OperEdgePort can be false if a BPDU has been received. The value of this object MUST be retained across reinitializations of the management system. The administrative value of the Edge Port parameter. A value of true(1) indicates that this port should be assumed as an edge-port and a value of false(2) indicates that this port should be assumed as a non-edge-port. Setting this object will also cause the corresponding instance of OperEdgePort to change to the same value. Note that even when this object's value is true the value of the corresponding instance of OperEdgePort can be false if a BPDU has been received. The value of this object MUST be retained across reinitializations of the management system.
:param int32 ProtocolMigration : When operating in RSTP (version 2) mode writing true(1) to this object forces this port to transmit RSTP BPDUs. Any other operation on this object has no effect and it always returns false(2) when read. When operating in RSTP (version 2) mode writing true(1) to this object forces this port to transmit RSTP BPDUs. Any other operation on this object has no effect and it always returns false(2) when read.
:param int32 BridgeAssurance : When enabled BPDUs will be transmitted out of all stp ports regardless of state. When an stp port fails to receive a BPDU the port should transition to a Blocked state. Upon reception of BDPU after shutdown should transition port into the bridge. When enabled BPDUs will be transmitted out of all stp ports regardless of state. When an stp port fails to receive a BPDU the port should transition to a Blocked state. Upon reception of BDPU after shutdown should transition port into the bridge.
:param int32 Priority : The value of the priority field that is contained in the first in network byte order octet of the 2 octet long Port ID. The other octet of the Port ID is given by the value of StpPort. On bridges supporting IEEE 802.1t or IEEE 802.1w The value of the priority field that is contained in the first in network byte order octet of the 2 octet long Port ID. The other octet of the Port ID is given by the value of StpPort. On bridges supporting IEEE 802.1t or IEEE 802.1w
:param string AdminState : The enabled/disabled status of the port. The enabled/disabled status of the port.
:param int32 BpduGuard : A Port as OperEdge which receives BPDU with BpduGuard enabled will shut the port down. A Port as OperEdge which receives BPDU with BpduGuard enabled will shut the port down.
:param int32 AdminPointToPoint : The administrative point-to-point status of the LAN segment attached to this port using the enumeration values of the IEEE 802.1w clause. A value of forceTrue(0) indicates that this port should always be treated as if it is connected to a point-to-point link. A value of forceFalse(1) indicates that this port should be treated as having a shared media connection. A value of auto(2) indicates that this port is considered to have a point-to-point link if it is an Aggregator and all of its members are aggregatable or if the MAC entity is configured for full duplex operation The administrative point-to-point status of the LAN segment attached to this port using the enumeration values of the IEEE 802.1w clause. A value of forceTrue(0) indicates that this port should always be treated as if it is connected to a point-to-point link. A value of forceFalse(1) indicates that this port should be treated as having a shared media connection. A value of auto(2) indicates that this port is considered to have a point-to-point link if it is an Aggregator and all of its members are aggregatable or if the MAC entity is configured for full duplex operation
:param int32 BpduGuardInterval : The interval time to which a port will try to recover from BPDU Guard err-disable state. If no BPDU frames are detected after this timeout plus 3 Times Hello Time then the port will transition back to Up state. If condition is cleared manually then this operation is ignored. If set to zero then timer is inactive and recovery is based on manual intervention. The interval time to which a port will try to recover from BPDU Guard err-disable state. If no BPDU frames are detected after this timeout plus 3 Times Hello Time then the port will transition back to Up state. If condition is cleared manually then this operation is ignored. If set to zero then timer is inactive and recovery is based on manual intervention.
:param int32 AdminPathCost : The administratively assigned value for the contribution of this port to the path cost of paths toward the spanning tree root. Writing a value of '0' assigns the automatically calculated default Path Cost value to the port. If the default Path Cost is being used this object returns '0' when read. This complements the object PathCost or PathCost32 which returns the operational value of the path cost. The value of this object MUST be retained across reinitializations of the management system. The administratively assigned value for the contribution of this port to the path cost of paths toward the spanning tree root. Writing a value of '0' assigns the automatically calculated default Path Cost value to the port. If the default Path Cost is being used this object returns '0' when read. This complements the object PathCost or PathCost32 which returns the operational value of the path cost. The value of this object MUST be retained across reinitializations of the management system.
:param int32 PathCost32 : The contribution of this port to the path cost of paths towards the spanning tree root which include this port. 802.1D-1998 recommends that the default value of this parameter be in inverse proportion to the speed of the attached LAN. This object replaces PathCost to support IEEE 802.1t. Value of 1 will force node to auto discover the value based on the ports capabilities. The contribution of this port to the path cost of paths towards the spanning tree root which include this port. 802.1D-1998 recommends that the default value of this parameter be in inverse proportion to the speed of the attached LAN. This object replaces PathCost to support IEEE 802.1t. Value of 1 will force node to auto discover the value based on the ports capabilities.
"""
def createStpPort(self,
Vlan,
IntfRef,
PathCost=1,
AdminEdgePort=2,
ProtocolMigration=1,
BridgeAssurance=2,
Priority=128,
AdminState='UP',
BpduGuard=2,
AdminPointToPoint=2,
BpduGuardInterval=15,
AdminPathCost=200000,
PathCost32=1):
obj = {
'Vlan' : int(Vlan),
'IntfRef' : IntfRef,
'PathCost' : int(PathCost),
'AdminEdgePort' : int(AdminEdgePort),
'ProtocolMigration' : int(ProtocolMigration),
'BridgeAssurance' : int(BridgeAssurance),
'Priority' : int(Priority),
'AdminState' : AdminState,
'BpduGuard' : int(BpduGuard),
'AdminPointToPoint' : int(AdminPointToPoint),
'BpduGuardInterval' : int(BpduGuardInterval),
'AdminPathCost' : int(AdminPathCost),
'PathCost32' : int(PathCost32),
}
reqUrl = self.cfgUrlBase+'StpPort'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteStpPort(self,
Vlan,
IntfRef):
obj = {
'Vlan' : Vlan,
'IntfRef' : IntfRef,
}
reqUrl = self.cfgUrlBase+'StpPort'
if self.authenticate == True:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteStpPortById(self, objectId ):
reqUrl = self.cfgUrlBase+'StpPort'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
else:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
return r
def updateStpPort(self,
Vlan,
IntfRef,
PathCost = None,
AdminEdgePort = None,
ProtocolMigration = None,
BridgeAssurance = None,
Priority = None,
AdminState = None,
BpduGuard = None,
AdminPointToPoint = None,
BpduGuardInterval = None,
AdminPathCost = None,
PathCost32 = None):
obj = {}
if Vlan != None :
obj['Vlan'] = int(Vlan)
if IntfRef != None :
obj['IntfRef'] = IntfRef
if PathCost != None :
obj['PathCost'] = int(PathCost)
if AdminEdgePort != None :
obj['AdminEdgePort'] = int(AdminEdgePort)
if ProtocolMigration != None :
obj['ProtocolMigration'] = int(ProtocolMigration)
if BridgeAssurance != None :
obj['BridgeAssurance'] = int(BridgeAssurance)
if Priority != None :
obj['Priority'] = int(Priority)
if AdminState != None :
obj['AdminState'] = AdminState
if BpduGuard != None :
obj['BpduGuard'] = int(BpduGuard)
if AdminPointToPoint != None :
obj['AdminPointToPoint'] = int(AdminPointToPoint)
if BpduGuardInterval != None :
obj['BpduGuardInterval'] = int(BpduGuardInterval)
if AdminPathCost != None :
obj['AdminPathCost'] = int(AdminPathCost)
if PathCost32 != None :
obj['PathCost32'] = int(PathCost32)
reqUrl = self.cfgUrlBase+'StpPort'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateStpPortById(self,
objectId,
PathCost = None,
AdminEdgePort = None,
ProtocolMigration = None,
BridgeAssurance = None,
Priority = None,
AdminState = None,
BpduGuard = None,
AdminPointToPoint = None,
BpduGuardInterval = None,
AdminPathCost = None,
PathCost32 = None):
obj = {}
if PathCost != None:
obj['PathCost'] = PathCost
if AdminEdgePort != None:
obj['AdminEdgePort'] = AdminEdgePort
if ProtocolMigration != None:
obj['ProtocolMigration'] = ProtocolMigration
if BridgeAssurance != None:
obj['BridgeAssurance'] = BridgeAssurance
if Priority != None:
obj['Priority'] = Priority
if AdminState != None:
obj['AdminState'] = AdminState
if BpduGuard != None:
obj['BpduGuard'] = BpduGuard
if AdminPointToPoint != None:
obj['AdminPointToPoint'] = AdminPointToPoint
if BpduGuardInterval != None:
obj['BpduGuardInterval'] = BpduGuardInterval
if AdminPathCost != None:
obj['AdminPathCost'] = AdminPathCost
if PathCost32 != None:
obj['PathCost32'] = PathCost32
reqUrl = self.cfgUrlBase+'StpPort'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdateStpPort(self,
Vlan,
IntfRef,
op,
path,
value,):
obj = {}
obj['Vlan'] = Vlan
obj['IntfRef'] = IntfRef
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'StpPort'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getStpPort(self,
Vlan,
IntfRef):
obj = {
'Vlan' : int(Vlan),
'IntfRef' : IntfRef,
}
reqUrl = self.cfgUrlBase + 'StpPort'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getStpPortById(self, objectId ):
reqUrl = self.cfgUrlBase + 'StpPort'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllStpPorts(self):
return self.getObjects('StpPort', self.cfgUrlBase)
def getVrrpV4IntfState(self,
IntfRef,
VRID):
obj = {
'IntfRef' : IntfRef,
'VRID' : int(VRID),
}
reqUrl = self.stateUrlBase + 'VrrpV4Intf'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getVrrpV4IntfStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'VrrpV4Intf'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllVrrpV4IntfStates(self):
return self.getObjects('VrrpV4Intf', self.stateUrlBase)
"""
    .. automethod :: createIPv4Route(self,
    :param string DestinationNw : IP address of the route
    :param string NetworkMask : mask of the route
    :param NextHopInfo NextHop :
    :param string Protocol : Protocol type of the route
    :param bool NullRoute : Specify if this is a null route
    :param uint32 Cost : Cost of this route
"""
def createIPv4Route(self,
DestinationNw,
NetworkMask,
NextHop,
Protocol='STATIC',
NullRoute=False,
Cost=0):
obj = {
'DestinationNw' : DestinationNw,
'NetworkMask' : NetworkMask,
'NextHop' : NextHop,
'Protocol' : Protocol,
'NullRoute' : True if NullRoute else False,
'Cost' : int(Cost),
}
reqUrl = self.cfgUrlBase+'IPv4Route'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteIPv4Route(self,
DestinationNw,
NetworkMask):
obj = {
'DestinationNw' : DestinationNw,
'NetworkMask' : NetworkMask,
}
reqUrl = self.cfgUrlBase+'IPv4Route'
if self.authenticate == True:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteIPv4RouteById(self, objectId ):
reqUrl = self.cfgUrlBase+'IPv4Route'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
else:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
return r
def updateIPv4Route(self,
DestinationNw,
NetworkMask,
NextHop = None,
Protocol = None,
NullRoute = None,
Cost = None):
obj = {}
if DestinationNw != None :
obj['DestinationNw'] = DestinationNw
if NetworkMask != None :
obj['NetworkMask'] = NetworkMask
if NextHop != None :
obj['NextHop'] = NextHop
if Protocol != None :
obj['Protocol'] = Protocol
if NullRoute != None :
obj['NullRoute'] = True if NullRoute else False
if Cost != None :
obj['Cost'] = int(Cost)
reqUrl = self.cfgUrlBase+'IPv4Route'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateIPv4RouteById(self,
objectId,
NextHop = None,
Protocol = None,
NullRoute = None,
Cost = None):
obj = {}
if NextHop != None:
obj['NextHop'] = NextHop
if Protocol != None:
obj['Protocol'] = Protocol
if NullRoute != None:
obj['NullRoute'] = NullRoute
if Cost != None:
obj['Cost'] = Cost
reqUrl = self.cfgUrlBase+'IPv4Route'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdateIPv4Route(self,
DestinationNw,
NetworkMask,
op,
path,
value,):
obj = {}
obj['DestinationNw'] = DestinationNw
obj['NetworkMask'] = NetworkMask
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'IPv4Route'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getIPv4Route(self,
DestinationNw,
NetworkMask):
obj = {
'DestinationNw' : DestinationNw,
'NetworkMask' : NetworkMask,
}
reqUrl = self.cfgUrlBase + 'IPv4Route'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getIPv4RouteById(self, objectId ):
reqUrl = self.cfgUrlBase + 'IPv4Route'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllIPv4Routes(self):
return self.getObjects('IPv4Route', self.cfgUrlBase)
"""
    .. automethod :: createBGPv6PeerGroup(self,
    :param string Name : Name of the BGP peer group
    :param uint8 MaxPrefixesRestartTimer : Time to wait before we start BGP peer session when we receive max prefixes
    :param bool MultiHopEnable : Enable/Disable multi hop for BGP neighbor
    :param string Description : Description of the BGP neighbor
    :param bool NextHopSelf : Use neighbor source IP as the next hop for IBGP neighbors
    :param string AdjRIBOutFilter : Policy that is applied for Adj-RIB-Out prefix filtering
    :param bool MaxPrefixesDisconnect : Disconnect the BGP peer session when we receive the max prefixes from the neighbor
    :param string LocalAS : Local AS of the BGP neighbor
    :param uint8 MultiHopTTL : TTL for multi hop BGP neighbor
    :param uint32 KeepaliveTime : Keep alive time for the BGP neighbor
    :param uint32 RouteReflectorClusterId : Cluster Id of the internal BGP neighbor route reflector client
    :param uint8 AddPathsMaxTx : Max number of additional paths that can be transmitted to BGP neighbor
    :param string AdjRIBInFilter : Policy that is applied for Adj-RIB-In prefix filtering
    :param bool AddPathsRx : Receive additional paths from BGP neighbor
    :param string UpdateSource : Source IP to connect to the BGP neighbor
    :param bool RouteReflectorClient : Set/Clear BGP neighbor as a route reflector client
    :param uint8 MaxPrefixesThresholdPct : The percentage of maximum prefixes before we start logging
    :param uint32 HoldTime : Hold time for the BGP neighbor
    :param uint32 MaxPrefixes : Maximum number of prefixes that can be received from the BGP neighbor
    :param string PeerAS : Peer AS of the BGP neighbor
    :param uint32 ConnectRetryTime : Connect retry time to connect to BGP neighbor after disconnect
"""
def createBGPv6PeerGroup(self,
Name,
MaxPrefixesRestartTimer=0,
MultiHopEnable=False,
Description='',
NextHopSelf=False,
AdjRIBOutFilter='',
MaxPrefixesDisconnect=False,
LocalAS='',
MultiHopTTL=0,
KeepaliveTime=0,
RouteReflectorClusterId=0,
AddPathsMaxTx=0,
AdjRIBInFilter='',
AddPathsRx=False,
UpdateSource='',
RouteReflectorClient=False,
MaxPrefixesThresholdPct=80,
HoldTime=0,
MaxPrefixes=0,
PeerAS='',
ConnectRetryTime=0):
obj = {
'Name' : Name,
'MaxPrefixesRestartTimer' : int(MaxPrefixesRestartTimer),
'MultiHopEnable' : True if MultiHopEnable else False,
'Description' : Description,
'NextHopSelf' : True if NextHopSelf else False,
'AdjRIBOutFilter' : AdjRIBOutFilter,
'MaxPrefixesDisconnect' : True if MaxPrefixesDisconnect else False,
'LocalAS' : LocalAS,
'MultiHopTTL' : int(MultiHopTTL),
'KeepaliveTime' : int(KeepaliveTime),
'RouteReflectorClusterId' : int(RouteReflectorClusterId),
'AddPathsMaxTx' : int(AddPathsMaxTx),
'AdjRIBInFilter' : AdjRIBInFilter,
'AddPathsRx' : True if AddPathsRx else False,
'UpdateSource' : UpdateSource,
'RouteReflectorClient' : True if RouteReflectorClient else False,
'MaxPrefixesThresholdPct' : int(MaxPrefixesThresholdPct),
'HoldTime' : int(HoldTime),
'MaxPrefixes' : int(MaxPrefixes),
'PeerAS' : PeerAS,
'ConnectRetryTime' : int(ConnectRetryTime),
}
reqUrl = self.cfgUrlBase+'BGPv6PeerGroup'
if self.authenticate == True:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.post(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteBGPv6PeerGroup(self,
Name):
obj = {
'Name' : Name,
}
reqUrl = self.cfgUrlBase+'BGPv6PeerGroup'
if self.authenticate == True:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def deleteBGPv6PeerGroupById(self, objectId ):
reqUrl = self.cfgUrlBase+'BGPv6PeerGroup'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
else:
r = requests.delete(reqUrl, data=None, headers=headers,timeout=self.timeout)
return r
def updateBGPv6PeerGroup(self,
Name,
MaxPrefixesRestartTimer = None,
MultiHopEnable = None,
Description = None,
NextHopSelf = None,
AdjRIBOutFilter = None,
MaxPrefixesDisconnect = None,
LocalAS = None,
MultiHopTTL = None,
KeepaliveTime = None,
RouteReflectorClusterId = None,
AddPathsMaxTx = None,
AdjRIBInFilter = None,
AddPathsRx = None,
UpdateSource = None,
RouteReflectorClient = None,
MaxPrefixesThresholdPct = None,
HoldTime = None,
MaxPrefixes = None,
PeerAS = None,
ConnectRetryTime = None):
obj = {}
if Name != None :
obj['Name'] = Name
if MaxPrefixesRestartTimer != None :
obj['MaxPrefixesRestartTimer'] = int(MaxPrefixesRestartTimer)
if MultiHopEnable != None :
obj['MultiHopEnable'] = True if MultiHopEnable else False
if Description != None :
obj['Description'] = Description
if NextHopSelf != None :
obj['NextHopSelf'] = True if NextHopSelf else False
if AdjRIBOutFilter != None :
obj['AdjRIBOutFilter'] = AdjRIBOutFilter
if MaxPrefixesDisconnect != None :
obj['MaxPrefixesDisconnect'] = True if MaxPrefixesDisconnect else False
if LocalAS != None :
obj['LocalAS'] = LocalAS
if MultiHopTTL != None :
obj['MultiHopTTL'] = int(MultiHopTTL)
if KeepaliveTime != None :
obj['KeepaliveTime'] = int(KeepaliveTime)
if RouteReflectorClusterId != None :
obj['RouteReflectorClusterId'] = int(RouteReflectorClusterId)
if AddPathsMaxTx != None :
obj['AddPathsMaxTx'] = int(AddPathsMaxTx)
if AdjRIBInFilter != None :
obj['AdjRIBInFilter'] = AdjRIBInFilter
if AddPathsRx != None :
obj['AddPathsRx'] = True if AddPathsRx else False
if UpdateSource != None :
obj['UpdateSource'] = UpdateSource
if RouteReflectorClient != None :
obj['RouteReflectorClient'] = True if RouteReflectorClient else False
if MaxPrefixesThresholdPct != None :
obj['MaxPrefixesThresholdPct'] = int(MaxPrefixesThresholdPct)
if HoldTime != None :
obj['HoldTime'] = int(HoldTime)
if MaxPrefixes != None :
obj['MaxPrefixes'] = int(MaxPrefixes)
if PeerAS != None :
obj['PeerAS'] = PeerAS
if ConnectRetryTime != None :
obj['ConnectRetryTime'] = int(ConnectRetryTime)
reqUrl = self.cfgUrlBase+'BGPv6PeerGroup'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def updateBGPv6PeerGroupById(self,
objectId,
MaxPrefixesRestartTimer = None,
MultiHopEnable = None,
Description = None,
NextHopSelf = None,
AdjRIBOutFilter = None,
MaxPrefixesDisconnect = None,
LocalAS = None,
MultiHopTTL = None,
KeepaliveTime = None,
RouteReflectorClusterId = None,
AddPathsMaxTx = None,
AdjRIBInFilter = None,
AddPathsRx = None,
UpdateSource = None,
RouteReflectorClient = None,
MaxPrefixesThresholdPct = None,
HoldTime = None,
MaxPrefixes = None,
PeerAS = None,
ConnectRetryTime = None):
obj = {}
if MaxPrefixesRestartTimer != None:
obj['MaxPrefixesRestartTimer'] = MaxPrefixesRestartTimer
if MultiHopEnable != None:
obj['MultiHopEnable'] = MultiHopEnable
if Description != None:
obj['Description'] = Description
if NextHopSelf != None:
obj['NextHopSelf'] = NextHopSelf
if AdjRIBOutFilter != None:
obj['AdjRIBOutFilter'] = AdjRIBOutFilter
if MaxPrefixesDisconnect != None:
obj['MaxPrefixesDisconnect'] = MaxPrefixesDisconnect
if LocalAS != None:
obj['LocalAS'] = LocalAS
if MultiHopTTL != None:
obj['MultiHopTTL'] = MultiHopTTL
if KeepaliveTime != None:
obj['KeepaliveTime'] = KeepaliveTime
if RouteReflectorClusterId != None:
obj['RouteReflectorClusterId'] = RouteReflectorClusterId
if AddPathsMaxTx != None:
obj['AddPathsMaxTx'] = AddPathsMaxTx
if AdjRIBInFilter != None:
obj['AdjRIBInFilter'] = AdjRIBInFilter
if AddPathsRx != None:
obj['AddPathsRx'] = AddPathsRx
if UpdateSource != None:
obj['UpdateSource'] = UpdateSource
if RouteReflectorClient != None:
obj['RouteReflectorClient'] = RouteReflectorClient
if MaxPrefixesThresholdPct != None:
obj['MaxPrefixesThresholdPct'] = MaxPrefixesThresholdPct
if HoldTime != None:
obj['HoldTime'] = HoldTime
if MaxPrefixes != None:
obj['MaxPrefixes'] = MaxPrefixes
if PeerAS != None:
obj['PeerAS'] = PeerAS
if ConnectRetryTime != None:
obj['ConnectRetryTime'] = ConnectRetryTime
reqUrl = self.cfgUrlBase+'BGPv6PeerGroup'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
return r
def patchUpdateBGPv6PeerGroup(self,
Name,
op,
path,
value,):
obj = {}
obj['Name'] = Name
obj['patch']=[{'op':op,'path':path,'value':value}]
reqUrl = self.cfgUrlBase+'BGPv6PeerGroup'
if self.authenticate == True:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.patch(reqUrl, data=json.dumps(obj), headers=patchheaders, timeout=self.timeout)
return r
def getBGPv6PeerGroup(self,
Name):
obj = {
'Name' : Name,
}
reqUrl = self.cfgUrlBase + 'BGPv6PeerGroup'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getBGPv6PeerGroupById(self, objectId ):
reqUrl = self.cfgUrlBase + 'BGPv6PeerGroup'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllBGPv6PeerGroups(self):
return self.getObjects('BGPv6PeerGroup', self.cfgUrlBase)
def getDHCPv6RelayIntfState(self,
IntfRef):
obj = {
'IntfRef' : IntfRef,
}
reqUrl = self.stateUrlBase + 'DHCPv6RelayIntf'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getDHCPv6RelayIntfStateById(self, objectId ):
reqUrl = self.stateUrlBase + 'DHCPv6RelayIntf'+"/%s"%(objectId)
if self.authenticate == True:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=None, headers=headers, timeout=self.timeout)
return r
def getAllDHCPv6RelayIntfStates(self):
return self.getObjects('DHCPv6RelayIntf', self.stateUrlBase)
def getArpEntryHwState(self,
IpAddr):
obj = {
'IpAddr' : IpAddr,
}
reqUrl = self.stateUrlBase + 'ArpEntryHw'
if self.authenticate == True:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
else:
r = requests.get(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
return r
def getArpEntryHwStateById(self, objectId ):
    """GET one ArpEntryHw state object by its object id."""
    url = '%s%s/%s' % (self.stateUrlBase, 'ArpEntryHw', objectId)
    if self.authenticate != True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def getAllArpEntryHwStates(self):
    """Return every ArpEntryHw state object via the bulk-get helper."""
    return self.getObjects('ArpEntryHw', self.stateUrlBase)
def getOspfGlobalState(self, RouterId):
    """GET the OspfGlobal state object keyed by RouterId."""
    url = self.stateUrlBase + 'OspfGlobal'
    body = json.dumps({'RouterId': RouterId})
    if self.authenticate != True:
        return requests.get(url, data=body, headers=headers, timeout=self.timeout)
    return requests.get(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def getOspfGlobalStateById(self, objectId ):
    """GET one OspfGlobal state object by its object id."""
    url = '%s%s/%s' % (self.stateUrlBase, 'OspfGlobal', objectId)
    if self.authenticate != True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def getAllOspfGlobalStates(self):
    """Return every OspfGlobal state object via the bulk-get helper."""
    return self.getObjects('OspfGlobal', self.stateUrlBase)
def getTemperatureSensorPMDataState(self, Class, Name):
    """GET the TemperatureSensorPMData state object keyed by (Class, Name)."""
    url = self.stateUrlBase + 'TemperatureSensorPMData'
    body = json.dumps({'Class': Class, 'Name': Name})
    if self.authenticate != True:
        return requests.get(url, data=body, headers=headers, timeout=self.timeout)
    return requests.get(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def getTemperatureSensorPMDataStateById(self, objectId ):
    """GET one TemperatureSensorPMData state object by its object id."""
    url = '%s%s/%s' % (self.stateUrlBase, 'TemperatureSensorPMData', objectId)
    if self.authenticate != True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def getAllTemperatureSensorPMDataStates(self):
    """Return every TemperatureSensorPMData state object via the bulk-get helper."""
    return self.getObjects('TemperatureSensorPMData', self.stateUrlBase)
"""
.. automethod :: createIPv6Intf(self,
    :param string IntfRef : Interface name or ifindex of port/lag or vlan on which this IPv6 object is configured
:param string AdminState : Administrative state of this IP interface Administrative state of this IP interface
:param string IpAddr : Interface Global Scope IP Address/Prefix-Length to provision on switch interface Interface Global Scope IP Address/Prefix-Length to provision on switch interface
:param bool LinkIp : Interface Link Scope IP Address auto-configured Interface Link Scope IP Address auto-configured
"""
def createIPv6Intf(self, IntfRef, AdminState='UP', IpAddr='', LinkIp=True):
    """POST a new IPv6Intf config object.

    LinkIp is normalized to a strict bool before serialization.
    """
    payload = {
        'IntfRef': IntfRef,
        'AdminState': AdminState,
        'IpAddr': IpAddr,
        'LinkIp': bool(LinkIp),
    }
    url = self.cfgUrlBase + 'IPv6Intf'
    if self.authenticate != True:
        return requests.post(url, data=json.dumps(payload), headers=headers, timeout=self.timeout)
    return requests.post(url, data=json.dumps(payload), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def deleteIPv6Intf(self, IntfRef):
    """DELETE the IPv6Intf config object keyed by IntfRef."""
    url = self.cfgUrlBase + 'IPv6Intf'
    body = json.dumps({'IntfRef': IntfRef})
    if self.authenticate != True:
        return requests.delete(url, data=body, headers=headers, timeout=self.timeout)
    return requests.delete(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def deleteIPv6IntfById(self, objectId ):
    """DELETE the IPv6Intf identified by objectId.

    Fix: the authenticated branch previously sent the same unauthenticated
    request as the else branch; it now passes basic-auth credentials and
    verify=False, consistent with every other method in this class.
    """
    reqUrl = self.cfgUrlBase+'IPv6Intf'+"/%s"%(objectId)
    if self.authenticate == True:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updateIPv6Intf(self, IntfRef, AdminState = None, IpAddr = None, LinkIp = None):
    """PATCH the IPv6Intf keyed by IntfRef; only non-None attributes are sent."""
    payload = {}
    if IntfRef is not None:
        payload['IntfRef'] = IntfRef
    if AdminState is not None:
        payload['AdminState'] = AdminState
    if IpAddr is not None:
        payload['IpAddr'] = IpAddr
    if LinkIp is not None:
        payload['LinkIp'] = bool(LinkIp)
    url = self.cfgUrlBase + 'IPv6Intf'
    if self.authenticate != True:
        return requests.patch(url, data=json.dumps(payload), headers=headers, timeout=self.timeout)
    return requests.patch(url, data=json.dumps(payload), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def updateIPv6IntfById(self, objectId, AdminState = None, IpAddr = None, LinkIp = None):
    """PATCH the IPv6Intf identified by objectId; only non-None attributes are sent.

    Fix: LinkIp is normalized to a strict bool before serialization, matching
    updateIPv6Intf (the key-based variant of this updater).
    """
    obj = {}
    if AdminState != None:
        obj['AdminState'] = AdminState
    if IpAddr != None:
        obj['IpAddr'] = IpAddr
    if LinkIp != None:
        obj['LinkIp'] = True if LinkIp else False
    reqUrl = self.cfgUrlBase+'IPv6Intf'+"/%s"%(objectId)
    if self.authenticate == True:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
    return r
def patchUpdateIPv6Intf(self, IntfRef, op, path, value):
    """Send a JSON-patch-style update ([{'op','path','value'}]) for the IPv6Intf keyed by IntfRef."""
    payload = {'IntfRef': IntfRef,
               'patch': [{'op': op, 'path': path, 'value': value}]}
    url = self.cfgUrlBase + 'IPv6Intf'
    if self.authenticate != True:
        return requests.patch(url, data=json.dumps(payload), headers=patchheaders, timeout=self.timeout)
    return requests.patch(url, data=json.dumps(payload), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def getIPv6Intf(self, IntfRef):
    """GET the IPv6Intf config object keyed by IntfRef."""
    url = self.cfgUrlBase + 'IPv6Intf'
    body = json.dumps({'IntfRef': IntfRef})
    if self.authenticate != True:
        return requests.get(url, data=body, headers=headers, timeout=self.timeout)
    return requests.get(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def getIPv6IntfById(self, objectId ):
    """GET one IPv6Intf config object by its object id."""
    url = '%s%s/%s' % (self.cfgUrlBase, 'IPv6Intf', objectId)
    if self.authenticate != True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def getAllIPv6Intfs(self):
    """Return every IPv6Intf config object via the bulk-get helper."""
    return self.getObjects('IPv6Intf', self.cfgUrlBase)
def getRouteStatsPerProtocolState(self, Protocol):
    """GET the RouteStatsPerProtocol state object keyed by Protocol."""
    url = self.stateUrlBase + 'RouteStatsPerProtocol'
    body = json.dumps({'Protocol': Protocol})
    if self.authenticate != True:
        return requests.get(url, data=body, headers=headers, timeout=self.timeout)
    return requests.get(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def getRouteStatsPerProtocolStateById(self, objectId ):
    """GET one RouteStatsPerProtocol state object by its object id."""
    url = '%s%s/%s' % (self.stateUrlBase, 'RouteStatsPerProtocol', objectId)
    if self.authenticate != True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def getAllRouteStatsPerProtocolStates(self):
    """Return every RouteStatsPerProtocol state object via the bulk-get helper."""
    return self.getObjects('RouteStatsPerProtocol', self.stateUrlBase)
"""
.. automethod :: createVxlanInstance(self,
:param uint32 Vni : VXLAN Network Id VXLAN Network Id
:param uint16 UntaggedVlanId : Vlan associated with the untagged traffic. Used in conjunction with a given VTEP inner-vlan-handling-mode Vlan associated with the untagged traffic. Used in conjunction with a given VTEP inner-vlan-handling-mode
:param uint16 VlanId : Vlan associated with the Access targets. Used in conjunction with a given VTEP inner-vlan-handling-mode Vlan associated with the Access targets. Used in conjunction with a given VTEP inner-vlan-handling-mode
:param string AdminState : Administrative state of VXLAN layer Administrative state of VXLAN layer
"""
def createVxlanInstance(self, Vni, UntaggedVlanId, VlanId, AdminState='UP'):
    """POST a new VxlanInstance config object (Vni is coerced to int)."""
    payload = {
        'Vni': int(Vni),
        'UntaggedVlanId': UntaggedVlanId,
        'VlanId': VlanId,
        'AdminState': AdminState,
    }
    url = self.cfgUrlBase + 'VxlanInstance'
    if self.authenticate != True:
        return requests.post(url, data=json.dumps(payload), headers=headers, timeout=self.timeout)
    return requests.post(url, data=json.dumps(payload), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def deleteVxlanInstance(self, Vni):
    """DELETE the VxlanInstance keyed by Vni.

    Fix: coerce Vni with int() so the JSON key matches the integer form used
    by createVxlanInstance/getVxlanInstance; a string Vni would otherwise
    serialize as a JSON string and fail to select the object.
    """
    obj = {
        'Vni' : int(Vni),
    }
    reqUrl = self.cfgUrlBase+'VxlanInstance'
    if self.authenticate == True:
        r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
    return r
def deleteVxlanInstanceById(self, objectId ):
    """DELETE the VxlanInstance identified by objectId.

    Fix: the authenticated branch previously sent the same unauthenticated
    request as the else branch; it now passes basic-auth credentials and
    verify=False, consistent with every other method in this class.
    """
    reqUrl = self.cfgUrlBase+'VxlanInstance'+"/%s"%(objectId)
    if self.authenticate == True:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updateVxlanInstance(self, Vni, UntaggedVlanId = None, VlanId = None, AdminState = None):
    """PATCH the VxlanInstance keyed by Vni; only non-None attributes are sent."""
    payload = {}
    if Vni is not None:
        payload['Vni'] = int(Vni)
    if UntaggedVlanId is not None:
        payload['UntaggedVlanId'] = UntaggedVlanId
    if VlanId is not None:
        payload['VlanId'] = VlanId
    if AdminState is not None:
        payload['AdminState'] = AdminState
    url = self.cfgUrlBase + 'VxlanInstance'
    if self.authenticate != True:
        return requests.patch(url, data=json.dumps(payload), headers=headers, timeout=self.timeout)
    return requests.patch(url, data=json.dumps(payload), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def updateVxlanInstanceById(self, objectId, UntaggedVlanId = None, VlanId = None, AdminState = None):
    """PATCH the VxlanInstance identified by objectId; only non-None attributes are sent."""
    payload = {}
    if UntaggedVlanId is not None:
        payload['UntaggedVlanId'] = UntaggedVlanId
    if VlanId is not None:
        payload['VlanId'] = VlanId
    if AdminState is not None:
        payload['AdminState'] = AdminState
    url = '%s%s/%s' % (self.cfgUrlBase, 'VxlanInstance', objectId)
    if self.authenticate != True:
        return requests.patch(url, data=json.dumps(payload), headers=headers, timeout=self.timeout)
    return requests.patch(url, data=json.dumps(payload), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def patchUpdateVxlanInstance(self, Vni, op, path, value):
    """Send a JSON-patch-style update ([{'op','path','value'}]) for the VxlanInstance keyed by Vni."""
    payload = {'Vni': Vni,
               'patch': [{'op': op, 'path': path, 'value': value}]}
    url = self.cfgUrlBase + 'VxlanInstance'
    if self.authenticate != True:
        return requests.patch(url, data=json.dumps(payload), headers=patchheaders, timeout=self.timeout)
    return requests.patch(url, data=json.dumps(payload), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def getVxlanInstance(self, Vni):
    """GET the VxlanInstance config object keyed by Vni (coerced to int)."""
    url = self.cfgUrlBase + 'VxlanInstance'
    body = json.dumps({'Vni': int(Vni)})
    if self.authenticate != True:
        return requests.get(url, data=body, headers=headers, timeout=self.timeout)
    return requests.get(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def getVxlanInstanceById(self, objectId ):
    """GET one VxlanInstance config object by its object id."""
    url = '%s%s/%s' % (self.cfgUrlBase, 'VxlanInstance', objectId)
    if self.authenticate != True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def getAllVxlanInstances(self):
    """Return every VxlanInstance config object via the bulk-get helper."""
    return self.getObjects('VxlanInstance', self.cfgUrlBase)
def getBGPv6RouteState(self, CIDRLen, Network):
    """GET the BGPv6Route state object keyed by (CIDRLen, Network)."""
    url = self.stateUrlBase + 'BGPv6Route'
    body = json.dumps({'CIDRLen': int(CIDRLen), 'Network': Network})
    if self.authenticate != True:
        return requests.get(url, data=body, headers=headers, timeout=self.timeout)
    return requests.get(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def getBGPv6RouteStateById(self, objectId ):
    """GET one BGPv6Route state object by its object id."""
    url = '%s%s/%s' % (self.stateUrlBase, 'BGPv6Route', objectId)
    if self.authenticate != True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def getAllBGPv6RouteStates(self):
    """Return every BGPv6Route state object via the bulk-get helper."""
    return self.getObjects('BGPv6Route', self.stateUrlBase)
def getBGPv4RouteState(self, CIDRLen, Network):
    """GET the BGPv4Route state object keyed by (CIDRLen, Network)."""
    url = self.stateUrlBase + 'BGPv4Route'
    body = json.dumps({'CIDRLen': int(CIDRLen), 'Network': Network})
    if self.authenticate != True:
        return requests.get(url, data=body, headers=headers, timeout=self.timeout)
    return requests.get(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def getBGPv4RouteStateById(self, objectId ):
    """GET one BGPv4Route state object by its object id."""
    url = '%s%s/%s' % (self.stateUrlBase, 'BGPv4Route', objectId)
    if self.authenticate != True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def getAllBGPv4RouteStates(self):
    """Return every BGPv4Route state object via the bulk-get helper."""
    return self.getObjects('BGPv4Route', self.stateUrlBase)
def getAsicGlobalPMState(self, Resource, ModuleId):
    """GET the AsicGlobalPM state object keyed by (Resource, ModuleId)."""
    url = self.stateUrlBase + 'AsicGlobalPM'
    body = json.dumps({'Resource': Resource, 'ModuleId': int(ModuleId)})
    if self.authenticate != True:
        return requests.get(url, data=body, headers=headers, timeout=self.timeout)
    return requests.get(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def getAsicGlobalPMStateById(self, objectId ):
    """GET one AsicGlobalPM state object by its object id."""
    url = '%s%s/%s' % (self.stateUrlBase, 'AsicGlobalPM', objectId)
    if self.authenticate != True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def getAllAsicGlobalPMStates(self):
    """Return every AsicGlobalPM state object via the bulk-get helper."""
    return self.getObjects('AsicGlobalPM', self.stateUrlBase)
"""
.. automethod :: createPolicyDefinition(self,
:param string Name : Policy Name Policy Name
:param int32 Priority : Priority of the policy w.r.t other policies configured Priority of the policy w.r.t other policies configured
:param PolicyDefinitionStmtPriority StatementList : Specifies list of statements along with their precedence order. Specifies list of statements along with their precedence order.
:param string MatchType : Specifies whether to match all/any of the statements within this policy Specifies whether to match all/any of the statements within this policy
:param string PolicyType : Specifies the intended protocol application for the policy Specifies the intended protocol application for the policy
"""
def createPolicyDefinition(self, Name, Priority, StatementList, MatchType='all', PolicyType='ALL'):
    """POST a new PolicyDefinition config object (Priority coerced to int)."""
    payload = {
        'Name': Name,
        'Priority': int(Priority),
        'StatementList': StatementList,
        'MatchType': MatchType,
        'PolicyType': PolicyType,
    }
    url = self.cfgUrlBase + 'PolicyDefinition'
    if self.authenticate != True:
        return requests.post(url, data=json.dumps(payload), headers=headers, timeout=self.timeout)
    return requests.post(url, data=json.dumps(payload), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def deletePolicyDefinition(self, Name):
    """DELETE the PolicyDefinition config object keyed by Name."""
    url = self.cfgUrlBase + 'PolicyDefinition'
    body = json.dumps({'Name': Name})
    if self.authenticate != True:
        return requests.delete(url, data=body, headers=headers, timeout=self.timeout)
    return requests.delete(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def deletePolicyDefinitionById(self, objectId ):
    """DELETE the PolicyDefinition identified by objectId.

    Fix: the authenticated branch previously sent the same unauthenticated
    request as the else branch; it now passes basic-auth credentials and
    verify=False, consistent with every other method in this class.
    """
    reqUrl = self.cfgUrlBase+'PolicyDefinition'+"/%s"%(objectId)
    if self.authenticate == True:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updatePolicyDefinition(self, Name, Priority = None, StatementList = None, MatchType = None, PolicyType = None):
    """PATCH the PolicyDefinition keyed by Name; only non-None attributes are sent."""
    payload = {}
    if Name is not None:
        payload['Name'] = Name
    if Priority is not None:
        payload['Priority'] = int(Priority)
    if StatementList is not None:
        payload['StatementList'] = StatementList
    if MatchType is not None:
        payload['MatchType'] = MatchType
    if PolicyType is not None:
        payload['PolicyType'] = PolicyType
    url = self.cfgUrlBase + 'PolicyDefinition'
    if self.authenticate != True:
        return requests.patch(url, data=json.dumps(payload), headers=headers, timeout=self.timeout)
    return requests.patch(url, data=json.dumps(payload), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def updatePolicyDefinitionById(self, objectId, Priority = None, StatementList = None, MatchType = None, PolicyType = None):
    """PATCH the PolicyDefinition identified by objectId; only non-None attributes are sent.

    Fix: Priority is coerced with int(), matching updatePolicyDefinition
    (the key-based variant of this updater).
    """
    obj = {}
    if Priority != None:
        obj['Priority'] = int(Priority)
    if StatementList != None:
        obj['StatementList'] = StatementList
    if MatchType != None:
        obj['MatchType'] = MatchType
    if PolicyType != None:
        obj['PolicyType'] = PolicyType
    reqUrl = self.cfgUrlBase+'PolicyDefinition'+"/%s"%(objectId)
    if self.authenticate == True:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
    return r
def patchUpdatePolicyDefinition(self, Name, op, path, value):
    """Send a JSON-patch-style update ([{'op','path','value'}]) for the PolicyDefinition keyed by Name."""
    payload = {'Name': Name,
               'patch': [{'op': op, 'path': path, 'value': value}]}
    url = self.cfgUrlBase + 'PolicyDefinition'
    if self.authenticate != True:
        return requests.patch(url, data=json.dumps(payload), headers=patchheaders, timeout=self.timeout)
    return requests.patch(url, data=json.dumps(payload), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def getPolicyDefinition(self, Name):
    """GET the PolicyDefinition config object keyed by Name."""
    url = self.cfgUrlBase + 'PolicyDefinition'
    body = json.dumps({'Name': Name})
    if self.authenticate != True:
        return requests.get(url, data=body, headers=headers, timeout=self.timeout)
    return requests.get(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def getPolicyDefinitionById(self, objectId ):
    """GET one PolicyDefinition config object by its object id."""
    url = '%s%s/%s' % (self.cfgUrlBase, 'PolicyDefinition', objectId)
    if self.authenticate != True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def getAllPolicyDefinitions(self):
    """Return every PolicyDefinition config object via the bulk-get helper."""
    return self.getObjects('PolicyDefinition', self.cfgUrlBase)
"""
.. automethod :: createBGPv4Aggregate(self,
:param string IpPrefix : IP Prefix in CIDR format to match IP Prefix in CIDR format to match
:param bool SendSummaryOnly : Send summary route only when aggregating routes Send summary route only when aggregating routes
:param bool GenerateASSet : Generate AS set when aggregating routes Generate AS set when aggregating routes
"""
def createBGPv4Aggregate(self, IpPrefix, SendSummaryOnly=False, GenerateASSet=False):
    """POST a new BGPv4Aggregate config object (flags normalized to strict bools)."""
    payload = {
        'IpPrefix': IpPrefix,
        'SendSummaryOnly': bool(SendSummaryOnly),
        'GenerateASSet': bool(GenerateASSet),
    }
    url = self.cfgUrlBase + 'BGPv4Aggregate'
    if self.authenticate != True:
        return requests.post(url, data=json.dumps(payload), headers=headers, timeout=self.timeout)
    return requests.post(url, data=json.dumps(payload), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def deleteBGPv4Aggregate(self, IpPrefix):
    """DELETE the BGPv4Aggregate config object keyed by IpPrefix."""
    url = self.cfgUrlBase + 'BGPv4Aggregate'
    body = json.dumps({'IpPrefix': IpPrefix})
    if self.authenticate != True:
        return requests.delete(url, data=body, headers=headers, timeout=self.timeout)
    return requests.delete(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def deleteBGPv4AggregateById(self, objectId ):
    """DELETE the BGPv4Aggregate identified by objectId.

    Fix: the authenticated branch previously sent the same unauthenticated
    request as the else branch; it now passes basic-auth credentials and
    verify=False, consistent with every other method in this class.
    """
    reqUrl = self.cfgUrlBase+'BGPv4Aggregate'+"/%s"%(objectId)
    if self.authenticate == True:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updateBGPv4Aggregate(self, IpPrefix, SendSummaryOnly = None, GenerateASSet = None):
    """PATCH the BGPv4Aggregate keyed by IpPrefix; only non-None attributes are sent."""
    payload = {}
    if IpPrefix is not None:
        payload['IpPrefix'] = IpPrefix
    if SendSummaryOnly is not None:
        payload['SendSummaryOnly'] = bool(SendSummaryOnly)
    if GenerateASSet is not None:
        payload['GenerateASSet'] = bool(GenerateASSet)
    url = self.cfgUrlBase + 'BGPv4Aggregate'
    if self.authenticate != True:
        return requests.patch(url, data=json.dumps(payload), headers=headers, timeout=self.timeout)
    return requests.patch(url, data=json.dumps(payload), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def updateBGPv4AggregateById(self, objectId, SendSummaryOnly = None, GenerateASSet = None):
    """PATCH the BGPv4Aggregate identified by objectId; only non-None attributes are sent.

    Fix: both flags are normalized to strict bools, matching
    updateBGPv4Aggregate (the key-based variant of this updater).
    """
    obj = {}
    if SendSummaryOnly != None:
        obj['SendSummaryOnly'] = True if SendSummaryOnly else False
    if GenerateASSet != None:
        obj['GenerateASSet'] = True if GenerateASSet else False
    reqUrl = self.cfgUrlBase+'BGPv4Aggregate'+"/%s"%(objectId)
    if self.authenticate == True:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
    return r
def patchUpdateBGPv4Aggregate(self, IpPrefix, op, path, value):
    """Send a JSON-patch-style update ([{'op','path','value'}]) for the BGPv4Aggregate keyed by IpPrefix."""
    payload = {'IpPrefix': IpPrefix,
               'patch': [{'op': op, 'path': path, 'value': value}]}
    url = self.cfgUrlBase + 'BGPv4Aggregate'
    if self.authenticate != True:
        return requests.patch(url, data=json.dumps(payload), headers=patchheaders, timeout=self.timeout)
    return requests.patch(url, data=json.dumps(payload), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def getBGPv4Aggregate(self, IpPrefix):
    """GET the BGPv4Aggregate config object keyed by IpPrefix."""
    url = self.cfgUrlBase + 'BGPv4Aggregate'
    body = json.dumps({'IpPrefix': IpPrefix})
    if self.authenticate != True:
        return requests.get(url, data=body, headers=headers, timeout=self.timeout)
    return requests.get(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def getBGPv4AggregateById(self, objectId ):
    """GET one BGPv4Aggregate config object by its object id."""
    url = '%s%s/%s' % (self.cfgUrlBase, 'BGPv4Aggregate', objectId)
    if self.authenticate != True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def getAllBGPv4Aggregates(self):
    """Return every BGPv4Aggregate config object via the bulk-get helper."""
    return self.getObjects('BGPv4Aggregate', self.cfgUrlBase)
def getSubIPv6IntfState(self, IntfRef, Type):
    """GET the SubIPv6Intf state object keyed by (IntfRef, Type)."""
    url = self.stateUrlBase + 'SubIPv6Intf'
    body = json.dumps({'IntfRef': IntfRef, 'Type': Type})
    if self.authenticate != True:
        return requests.get(url, data=body, headers=headers, timeout=self.timeout)
    return requests.get(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def getSubIPv6IntfStateById(self, objectId ):
    """GET one SubIPv6Intf state object by its object id."""
    url = '%s%s/%s' % (self.stateUrlBase, 'SubIPv6Intf', objectId)
    if self.authenticate != True:
        return requests.get(url, data=None, headers=headers, timeout=self.timeout)
    return requests.get(url, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def getAllSubIPv6IntfStates(self):
    """Return every SubIPv6Intf state object via the bulk-get helper."""
    return self.getObjects('SubIPv6Intf', self.stateUrlBase)
"""
.. automethod :: createPolicyCondition(self,
:param string Name : PolicyConditionName PolicyConditionName
:param string ConditionType : Specifies the match criterion this condition defines Specifies the match criterion this condition defines
:param string Protocol : Protocol to match on if the ConditionType is set to MatchProtocol Protocol to match on if the ConditionType is set to MatchProtocol
    :param uint32 MED : BGP MED value to match on when ConditionType is MatchMED
:param string ExtendedCommunitySet :
:param uint32 LocalPref : BGP LocalPreference attribute value to match on when the ConditionType is MatchLocalPref. BGP LocalPreference attribute value to match on when the ConditionType is MatchLocalPref.
:param string ASPath : BGP ASPath value (specified using regular expressions) to match on when ConditionType is MatchASPath. BGP ASPath value (specified using regular expressions) to match on when ConditionType is MatchASPath.
    :param string Community : BGP Community attribute value to match on when the conditionType is MatchCommunity - based on RFC 1997. Can either specify the well-known communities or any other community value in the format AA
:param string IpPrefix : Used in conjunction with MaskLengthRange to specify the IP Prefix to match on when the ConditionType is MatchDstIpPrefix/MatchSrcIpPrefix. Used in conjunction with MaskLengthRange to specify the IP Prefix to match on when the ConditionType is MatchDstIpPrefix/MatchSrcIpPrefix.
:param string ExtendedCommunityType : Specifies BGP Extended Community type (used along with value)to match on when the conditionType is MatchExtendedCommunity - based on RFC 4360. Specifies BGP Extended Community type (used along with value)to match on when the conditionType is MatchExtendedCommunity - based on RFC 4360.
:param string ASPathSet : List of ASPath values to match on when ConditionType is MATCHASPath List of ASPath values to match on when ConditionType is MATCHASPath
:param string PrefixSet : Name of a pre-defined prefix set to be used as a condition qualifier. Name of a pre-defined prefix set to be used as a condition qualifier.
    :param string MaskLengthRange : Used in conjunction with IpPrefix to specify the IP Prefix to match on when the ConditionType is MatchDstIpPrefix/MatchSrcIpPrefix.
:param string CommunitySet : List of BGP communities attribute to match on when the conditionType is MatchCommunity List of BGP communities attribute to match on when the conditionType is MatchCommunity
:param string ExtendedCommunityValue : Specifies BGP Extended Community value (used along with type)to match on when the conditionType is MatchExtendedCommunity - based on RFC 4360.This is a Specifies BGP Extended Community value (used along with type)to match on when the conditionType is MatchExtendedCommunity - based on RFC 4360.This is a
"""
def createPolicyCondition(self, Name, ConditionType, Protocol, MED=0,
                          ExtendedCommunitySet='', LocalPref=0, ASPath='',
                          Community='', IpPrefix='', ExtendedCommunityType='',
                          ASPathSet='', PrefixSet='', MaskLengthRange='',
                          CommunitySet='', ExtendedCommunityValue=''):
    """POST a new PolicyCondition config object (MED/LocalPref coerced to int)."""
    payload = {
        'Name': Name,
        'ConditionType': ConditionType,
        'Protocol': Protocol,
        'MED': int(MED),
        'ExtendedCommunitySet': ExtendedCommunitySet,
        'LocalPref': int(LocalPref),
        'ASPath': ASPath,
        'Community': Community,
        'IpPrefix': IpPrefix,
        'ExtendedCommunityType': ExtendedCommunityType,
        'ASPathSet': ASPathSet,
        'PrefixSet': PrefixSet,
        'MaskLengthRange': MaskLengthRange,
        'CommunitySet': CommunitySet,
        'ExtendedCommunityValue': ExtendedCommunityValue,
    }
    url = self.cfgUrlBase + 'PolicyCondition'
    if self.authenticate != True:
        return requests.post(url, data=json.dumps(payload), headers=headers, timeout=self.timeout)
    return requests.post(url, data=json.dumps(payload), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def deletePolicyCondition(self, Name):
    """DELETE the PolicyCondition config object keyed by Name."""
    url = self.cfgUrlBase + 'PolicyCondition'
    body = json.dumps({'Name': Name})
    if self.authenticate != True:
        return requests.delete(url, data=body, headers=headers, timeout=self.timeout)
    return requests.delete(url, data=body, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def deletePolicyConditionById(self, objectId ):
    """DELETE the PolicyCondition identified by objectId.

    Fix: the authenticated branch previously sent the same unauthenticated
    request as the else branch; it now passes basic-auth credentials and
    verify=False, consistent with every other method in this class.
    """
    reqUrl = self.cfgUrlBase+'PolicyCondition'+"/%s"%(objectId)
    if self.authenticate == True:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updatePolicyCondition(self, Name, ConditionType = None, Protocol = None, MED = None,
                          ExtendedCommunitySet = None, LocalPref = None, ASPath = None,
                          Community = None, IpPrefix = None, ExtendedCommunityType = None,
                          ASPathSet = None, PrefixSet = None, MaskLengthRange = None,
                          CommunitySet = None, ExtendedCommunityValue = None):
    """PATCH the PolicyCondition keyed by Name; only non-None attributes are sent."""
    # (json key, value, converter) -- converter applied when the value is present
    fields = (
        ('Name', Name, None),
        ('ConditionType', ConditionType, None),
        ('Protocol', Protocol, None),
        ('MED', MED, int),
        ('ExtendedCommunitySet', ExtendedCommunitySet, None),
        ('LocalPref', LocalPref, int),
        ('ASPath', ASPath, None),
        ('Community', Community, None),
        ('IpPrefix', IpPrefix, None),
        ('ExtendedCommunityType', ExtendedCommunityType, None),
        ('ASPathSet', ASPathSet, None),
        ('PrefixSet', PrefixSet, None),
        ('MaskLengthRange', MaskLengthRange, None),
        ('CommunitySet', CommunitySet, None),
        ('ExtendedCommunityValue', ExtendedCommunityValue, None),
    )
    payload = {}
    for key, val, conv in fields:
        if val is not None:
            payload[key] = conv(val) if conv else val
    url = self.cfgUrlBase + 'PolicyCondition'
    if self.authenticate != True:
        return requests.patch(url, data=json.dumps(payload), headers=headers, timeout=self.timeout)
    return requests.patch(url, data=json.dumps(payload), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def updatePolicyConditionById(self, objectId, ConditionType = None, Protocol = None, MED = None,
                              ExtendedCommunitySet = None, LocalPref = None, ASPath = None,
                              Community = None, IpPrefix = None, ExtendedCommunityType = None,
                              ASPathSet = None, PrefixSet = None, MaskLengthRange = None,
                              CommunitySet = None, ExtendedCommunityValue = None):
    """PATCH the PolicyCondition identified by objectId; only non-None attributes are sent.

    Fix: MED and LocalPref are coerced with int(), matching
    updatePolicyCondition (the key-based variant of this updater).
    """
    obj = {}
    if ConditionType != None:
        obj['ConditionType'] = ConditionType
    if Protocol != None:
        obj['Protocol'] = Protocol
    if MED != None:
        obj['MED'] = int(MED)
    if ExtendedCommunitySet != None:
        obj['ExtendedCommunitySet'] = ExtendedCommunitySet
    if LocalPref != None:
        obj['LocalPref'] = int(LocalPref)
    if ASPath != None:
        obj['ASPath'] = ASPath
    if Community != None:
        obj['Community'] = Community
    if IpPrefix != None:
        obj['IpPrefix'] = IpPrefix
    if ExtendedCommunityType != None:
        obj['ExtendedCommunityType'] = ExtendedCommunityType
    if ASPathSet != None:
        obj['ASPathSet'] = ASPathSet
    if PrefixSet != None:
        obj['PrefixSet'] = PrefixSet
    if MaskLengthRange != None:
        obj['MaskLengthRange'] = MaskLengthRange
    if CommunitySet != None:
        obj['CommunitySet'] = CommunitySet
    if ExtendedCommunityValue != None:
        obj['ExtendedCommunityValue'] = ExtendedCommunityValue
    reqUrl = self.cfgUrlBase+'PolicyCondition'+"/%s"%(objectId)
    if self.authenticate == True:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers,timeout=self.timeout)
    return r
def patchUpdatePolicyCondition(self, Name, op, path, value):
    """Send a JSON-patch-style update ([{'op','path','value'}]) for the PolicyCondition keyed by Name."""
    payload = {'Name': Name,
               'patch': [{'op': op, 'path': path, 'value': value}]}
    url = self.cfgUrlBase + 'PolicyCondition'
    if self.authenticate != True:
        return requests.patch(url, data=json.dumps(payload), headers=patchheaders, timeout=self.timeout)
    return requests.patch(url, data=json.dumps(payload), headers=patchheaders, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
def getPolicyCondition(self,
                       Name):
    """Fetch the PolicyCondition config object named *Name* (HTTP GET)."""
    payload = json.dumps({'Name': Name})
    url = self.cfgUrlBase + 'PolicyCondition'
    extra = {}
    if self.authenticate == True:
        extra = {'auth': (self.user, self.passwd), 'verify': False}
    return requests.get(url, data=payload, headers=headers, timeout=self.timeout, **extra)
def getPolicyConditionById(self, objectId ):
    """Fetch one PolicyCondition config object by its server-assigned id."""
    url = self.cfgUrlBase + 'PolicyCondition' + "/%s" % (objectId)
    extra = {}
    if self.authenticate == True:
        extra = {'auth': (self.user, self.passwd), 'verify': False}
    return requests.get(url, data=None, headers=headers, timeout=self.timeout, **extra)
def getAllPolicyConditions(self):
    """Return every PolicyCondition config object via the bulk getter."""
    all_objs = self.getObjects('PolicyCondition', self.cfgUrlBase)
    return all_objs
"""
.. automethod :: createPort(self,
:param string IntfRef : Front panel port name or system assigned interface id Front panel port name or system assigned interface id
:param int32 IfIndex : System assigned interface id for this port. Read only attribute System assigned interface id for this port. Read only attribute
:param string PhyIntfType : Type of internal phy interface Type of internal phy interface
:param string MacAddr : Mac address associated with this port Mac address associated with this port
:param int32 Speed : Port speed in Mbps Port speed in Mbps
:param string MediaType : Type of media inserted into this port Type of media inserted into this port
:param int32 Mtu : Maximum transmission unit size for this port Maximum transmission unit size for this port
:param string BreakOutMode : Break out mode for the port. Only applicable on ports that support breakout. Break out mode for the port. Only applicable on ports that support breakout.
:param bool PRBSRxEnable : Enable/Disable PRBS checker on this port Enable/Disable PRBS checker on this port
:param string Description : User provided string description User provided string description
:param string PRBSPolynomial : PRBS polynomial to use for generation/checking PRBS polynomial to use for generation/checking
:param string Duplex : Duplex setting for this port Duplex setting for this port
:param string LoopbackMode : Desired loopback setting for this port Desired loopback setting for this port
:param bool EnableFEC : Enable/Disable 802.3bj FEC on this interface Enable/Disable 802.3bj FEC on this interface
:param string AdminState : Administrative state of this port Administrative state of this port
:param string Autoneg : Autonegotiation setting for this port Autonegotiation setting for this port
:param bool PRBSTxEnable : Enable/Disable generation of PRBS on this port Enable/Disable generation of PRBS on this port
"""
def createPort(self,
               IntfRef,
               IfIndex,
               PhyIntfType,
               MacAddr,
               Speed,
               MediaType,
               Mtu,
               BreakOutMode,
               PRBSRxEnable=False,
               Description='FP Port',
               PRBSPolynomial='2^7',
               Duplex='Full_Duplex',
               LoopbackMode='NONE',
               EnableFEC=False,
               AdminState='DOWN',
               Autoneg='OFF',
               PRBSTxEnable=False):
    """Create a Port config object (HTTP POST) and return the response.

    Integer attributes are coerced with int() and the PRBS/FEC flags are
    normalized to real booleans before serialization, as before.
    """
    body = {
        'IntfRef': IntfRef,
        'IfIndex': int(IfIndex),
        'PhyIntfType': PhyIntfType,
        'MacAddr': MacAddr,
        'Speed': int(Speed),
        'MediaType': MediaType,
        'Mtu': int(Mtu),
        'BreakOutMode': BreakOutMode,
        'PRBSRxEnable': bool(PRBSRxEnable),
        'Description': Description,
        'PRBSPolynomial': PRBSPolynomial,
        'Duplex': Duplex,
        'LoopbackMode': LoopbackMode,
        'EnableFEC': bool(EnableFEC),
        'AdminState': AdminState,
        'Autoneg': Autoneg,
        'PRBSTxEnable': bool(PRBSTxEnable),
    }
    url = self.cfgUrlBase + 'Port'
    extra = {}
    if self.authenticate == True:
        extra = {'auth': (self.user, self.passwd), 'verify': False}
    return requests.post(url, data=json.dumps(body), headers=headers, timeout=self.timeout, **extra)
def deletePort(self,
               IntfRef):
    """Delete the Port config object keyed by *IntfRef* (HTTP DELETE)."""
    payload = json.dumps({'IntfRef': IntfRef})
    url = self.cfgUrlBase + 'Port'
    extra = {}
    if self.authenticate == True:
        extra = {'auth': (self.user, self.passwd), 'verify': False}
    return requests.delete(url, data=payload, headers=headers, timeout=self.timeout, **extra)
def deletePortById(self, objectId ):
    """Delete the Port object with the given server-assigned id (HTTP DELETE).

    Bug fix: both branches of the authenticate check were identical, so the
    authenticated branch never sent credentials. It now passes basic auth
    and verify=False, consistent with every other *ById method in this class.

    :param objectId: server-assigned id of the Port object
    :returns: the ``requests.Response`` from the DELETE request
    """
    reqUrl = self.cfgUrlBase + 'Port' + "/%s" % (objectId)
    if self.authenticate == True:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.delete(reqUrl, data=None, headers=headers, timeout=self.timeout)
    return r
def updatePort(self,
               IntfRef,
               IfIndex = None,
               PhyIntfType = None,
               MacAddr = None,
               Speed = None,
               MediaType = None,
               Mtu = None,
               BreakOutMode = None,
               PRBSRxEnable = None,
               Description = None,
               PRBSPolynomial = None,
               Duplex = None,
               LoopbackMode = None,
               EnableFEC = None,
               AdminState = None,
               Autoneg = None,
               PRBSTxEnable = None):
    """Partially update the Port keyed by *IntfRef* (HTTP PATCH).

    Only attributes passed as non-None are serialized into the request
    body; integer fields are coerced with int() and boolean flags are
    normalized, as in createPort.

    :returns: the ``requests.Response`` from the PATCH request
    """
    obj = {}
    # `is not None` replaces the original `!= None`: identity testing is
    # the idiomatic None check and cannot be fooled by a custom __eq__.
    if IntfRef is not None:
        obj['IntfRef'] = IntfRef
    if IfIndex is not None:
        obj['IfIndex'] = int(IfIndex)
    if PhyIntfType is not None:
        obj['PhyIntfType'] = PhyIntfType
    if MacAddr is not None:
        obj['MacAddr'] = MacAddr
    if Speed is not None:
        obj['Speed'] = int(Speed)
    if MediaType is not None:
        obj['MediaType'] = MediaType
    if Mtu is not None:
        obj['Mtu'] = int(Mtu)
    if BreakOutMode is not None:
        obj['BreakOutMode'] = BreakOutMode
    if PRBSRxEnable is not None:
        obj['PRBSRxEnable'] = bool(PRBSRxEnable)
    if Description is not None:
        obj['Description'] = Description
    if PRBSPolynomial is not None:
        obj['PRBSPolynomial'] = PRBSPolynomial
    if Duplex is not None:
        obj['Duplex'] = Duplex
    if LoopbackMode is not None:
        obj['LoopbackMode'] = LoopbackMode
    if EnableFEC is not None:
        obj['EnableFEC'] = bool(EnableFEC)
    if AdminState is not None:
        obj['AdminState'] = AdminState
    if Autoneg is not None:
        obj['Autoneg'] = Autoneg
    if PRBSTxEnable is not None:
        obj['PRBSTxEnable'] = bool(PRBSTxEnable)
    reqUrl = self.cfgUrlBase + 'Port'
    if self.authenticate == True:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
    return r
def updatePortById(self,
                   objectId,
                   IfIndex = None,
                   PhyIntfType = None,
                   MacAddr = None,
                   Speed = None,
                   MediaType = None,
                   Mtu = None,
                   BreakOutMode = None,
                   PRBSRxEnable = None,
                   Description = None,
                   PRBSPolynomial = None,
                   Duplex = None,
                   LoopbackMode = None,
                   EnableFEC = None,
                   AdminState = None,
                   Autoneg = None,
                   PRBSTxEnable = None):
    """Partially update the Port object with the given id (HTTP PATCH).

    Consistency fix: unlike updatePort, this method previously serialized
    IfIndex/Speed/Mtu and the boolean flags without coercion; it now
    applies the same int()/bool normalization so both update paths send
    the same JSON value types.

    :param objectId: server-assigned id of the Port object
    :returns: the ``requests.Response`` from the PATCH request
    """
    obj = {}
    # `is not None` is the idiomatic None check (identity, not equality).
    if IfIndex is not None:
        obj['IfIndex'] = int(IfIndex)
    if PhyIntfType is not None:
        obj['PhyIntfType'] = PhyIntfType
    if MacAddr is not None:
        obj['MacAddr'] = MacAddr
    if Speed is not None:
        obj['Speed'] = int(Speed)
    if MediaType is not None:
        obj['MediaType'] = MediaType
    if Mtu is not None:
        obj['Mtu'] = int(Mtu)
    if BreakOutMode is not None:
        obj['BreakOutMode'] = BreakOutMode
    if PRBSRxEnable is not None:
        obj['PRBSRxEnable'] = bool(PRBSRxEnable)
    if Description is not None:
        obj['Description'] = Description
    if PRBSPolynomial is not None:
        obj['PRBSPolynomial'] = PRBSPolynomial
    if Duplex is not None:
        obj['Duplex'] = Duplex
    if LoopbackMode is not None:
        obj['LoopbackMode'] = LoopbackMode
    if EnableFEC is not None:
        obj['EnableFEC'] = bool(EnableFEC)
    if AdminState is not None:
        obj['AdminState'] = AdminState
    if Autoneg is not None:
        obj['Autoneg'] = Autoneg
    if PRBSTxEnable is not None:
        obj['PRBSTxEnable'] = bool(PRBSTxEnable)
    reqUrl = self.cfgUrlBase + 'Port' + "/%s" % (objectId)
    if self.authenticate == True:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout, auth=(self.user, self.passwd), verify=False)
    else:
        r = requests.patch(reqUrl, data=json.dumps(obj), headers=headers, timeout=self.timeout)
    return r
def patchUpdatePort(self,
                    IntfRef,
                    op,
                    path,
                    value,):
    """Send a JSON-patch style update for the Port keyed by *IntfRef*."""
    body = json.dumps({
        'IntfRef': IntfRef,
        'patch': [{'op': op, 'path': path, 'value': value}],
    })
    url = self.cfgUrlBase + 'Port'
    extra = {}
    if self.authenticate == True:
        extra = {'auth': (self.user, self.passwd), 'verify': False}
    return requests.patch(url, data=body, headers=patchheaders, timeout=self.timeout, **extra)
def getPort(self,
            IntfRef):
    """Fetch the Port config object keyed by *IntfRef* (HTTP GET)."""
    payload = json.dumps({'IntfRef': IntfRef})
    url = self.cfgUrlBase + 'Port'
    extra = {}
    if self.authenticate == True:
        extra = {'auth': (self.user, self.passwd), 'verify': False}
    return requests.get(url, data=payload, headers=headers, timeout=self.timeout, **extra)
def getPortById(self, objectId ):
    """Fetch one Port config object by its server-assigned id."""
    url = self.cfgUrlBase + 'Port' + "/%s" % (objectId)
    extra = {}
    if self.authenticate == True:
        extra = {'auth': (self.user, self.passwd), 'verify': False}
    return requests.get(url, data=None, headers=headers, timeout=self.timeout, **extra)
def getAllPorts(self):
    """Return every Port config object via the generic bulk getter."""
    all_objs = self.getObjects('Port', self.cfgUrlBase)
    return all_objs
def getOspfIfEntryState(self,
                        IfIpAddress,
                        AddressLessIf):
    """Fetch the OspfIfEntry state object for the given interface key."""
    payload = json.dumps({
        'IfIpAddress': IfIpAddress,
        'AddressLessIf': int(AddressLessIf),
    })
    url = self.stateUrlBase + 'OspfIfEntry'
    extra = {}
    if self.authenticate == True:
        extra = {'auth': (self.user, self.passwd), 'verify': False}
    return requests.get(url, data=payload, headers=headers, timeout=self.timeout, **extra)
def getOspfIfEntryStateById(self, objectId ):
    """Fetch one OspfIfEntry state object by its server-assigned id."""
    url = self.stateUrlBase + 'OspfIfEntry' + "/%s" % (objectId)
    extra = {}
    if self.authenticate == True:
        extra = {'auth': (self.user, self.passwd), 'verify': False}
    return requests.get(url, data=None, headers=headers, timeout=self.timeout, **extra)
def getAllOspfIfEntryStates(self):
    """Return every OspfIfEntry state object via the bulk getter."""
    all_objs = self.getObjects('OspfIfEntry', self.stateUrlBase)
    return all_objs
def getRIBEventState(self,
                     Index):
    """Fetch the RIBEvent state object with the given numeric index."""
    payload = json.dumps({'Index': int(Index)})
    url = self.stateUrlBase + 'RIBEvent'
    extra = {}
    if self.authenticate == True:
        extra = {'auth': (self.user, self.passwd), 'verify': False}
    return requests.get(url, data=payload, headers=headers, timeout=self.timeout, **extra)
def getRIBEventStateById(self, objectId ):
    """Fetch one RIBEvent state object by its server-assigned id."""
    url = self.stateUrlBase + 'RIBEvent' + "/%s" % (objectId)
    extra = {}
    if self.authenticate == True:
        extra = {'auth': (self.user, self.passwd), 'verify': False}
    return requests.get(url, data=None, headers=headers, timeout=self.timeout, **extra)
def getAllRIBEventStates(self):
    """Return every RIBEvent state object via the bulk getter."""
    all_objs = self.getObjects('RIBEvent', self.stateUrlBase)
    return all_objs
| 44.788277
| 2,087
| 0.563904
| 70,446
| 740,395
| 5.926525
| 0.034949
| 0.037337
| 0.07269
| 0.090898
| 0.862754
| 0.838345
| 0.80205
| 0.793305
| 0.773816
| 0.766218
| 0
| 0.004454
| 0.339828
| 740,395
| 16,530
| 2,088
| 44.790986
| 0.8497
| 0.000841
| 0
| 0.797157
| 0
| 0
| 0.060208
| 0.007076
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.062146
| 0.001512
| null | null | 0.000227
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 10
|
83e489c8342d100001beeea78b4d1a0d85036ee5
| 9,665
|
py
|
Python
|
tests/features/replace_test.py
|
nacleric/babi
|
f1ce6d995bf3859459c5dda1d2188b6992434d30
|
[
"MIT"
] | 223
|
2019-07-14T04:21:24.000Z
|
2022-03-23T21:34:11.000Z
|
tests/features/replace_test.py
|
nacleric/babi
|
f1ce6d995bf3859459c5dda1d2188b6992434d30
|
[
"MIT"
] | 101
|
2019-09-02T02:25:22.000Z
|
2022-03-23T20:18:34.000Z
|
tests/features/replace_test.py
|
nacleric/babi
|
f1ce6d995bf3859459c5dda1d2188b6992434d30
|
[
"MIT"
] | 57
|
2019-10-19T23:30:13.000Z
|
2022-03-21T21:43:05.000Z
|
from __future__ import annotations
import pytest
from testing.runner import and_exit
@pytest.mark.parametrize('key', ('^C', 'Enter'))
def test_replace_cancel(run, key):
    """Pressing ^C or a bare Enter at the search prompt cancels replace."""
    with run() as h, and_exit(h):
        h.press('^\\')
        h.await_text('search (to replace):')
        h.press(key)
        h.await_text('cancelled')
def test_replace_invalid_regex(run):
    """An unbalanced pattern at the search prompt reports a regex error."""
    with run() as h, and_exit(h):
        h.press('^\\')
        h.await_text('search (to replace):')
        h.press_and_enter('(')
        h.await_text("invalid regex: '('")
def test_replace_invalid_replacement(run, ten_lines):
    """A trailing lone backslash in the replacement string is rejected."""
    with run(str(ten_lines)) as h, and_exit(h):
        h.press('^\\')
        h.await_text('search (to replace):')
        h.press_and_enter('line_0')
        h.await_text('replace with:')
        h.press_and_enter('\\')
        h.await_text('invalid replacement string')
def test_replace_cancel_at_replace_string(run):
    """^C at the 'replace with:' prompt cancels the whole operation."""
    with run() as h, and_exit(h):
        h.press('^\\')
        h.await_text('search (to replace):')
        h.press_and_enter('hello')
        h.await_text('replace with:')
        h.press('^C')
        h.await_text('cancelled')
@pytest.mark.parametrize('key', ('y', 'Y'))
def test_replace_actual_contents(run, ten_lines, key):
    """Confirming with 'y' or 'Y' performs the replacement and marks the
    buffer modified (' *'), reporting a single occurrence."""
    with run(str(ten_lines)) as h, and_exit(h):
        h.press('^\\')
        h.await_text('search (to replace):')
        h.press_and_enter('line_0')
        h.await_text('replace with:')
        h.press_and_enter('ohai')
        h.await_text('replace [yes, no, all]?')
        h.press(key)
        h.await_text_missing('line_0')
        h.await_text('ohai')
        h.await_text(' *')
        h.await_text('replaced 1 occurrence')
def test_replace_sets_x_hint_properly(run, tmpdir):
    """After a replace, moving the cursor up keeps the same x column hint."""
    f = tmpdir.join('f')
    contents = '''\
beginning_line
match me!
'''
    f.write(contents)
    with run(str(f)) as h, and_exit(h):
        h.press('^\\')
        h.await_text('search (to replace):')
        h.press_and_enter('me!')
        h.await_text('replace with:')
        h.press_and_enter('youuuu')
        h.await_text('replace [yes, no, all]?')
        h.press('y')
        # cursor lands at the end of the replacement on the matched line
        h.await_cursor_position(x=6, y=3)
        h.press('Up')
        h.press('Up')
        h.await_cursor_position(x=6, y=1)
def test_replace_cancel_at_individual_replace(run, ten_lines):
    """^C at the per-match [yes, no, all] prompt cancels replace."""
    with run(str(ten_lines)) as h, and_exit(h):
        h.press('^\\')
        h.await_text('search (to replace):')
        h.press_and_enter(r'line_\d')
        h.await_text('replace with:')
        h.press_and_enter('ohai')
        h.await_text('replace [yes, no, all]?')
        h.press('^C')
        h.await_text('cancelled')
def test_replace_unknown_characters_at_individual_replace(run, ten_lines):
    """An unrecognized key at the per-match prompt is ignored; ^C still
    cancels afterwards."""
    with run(str(ten_lines)) as h, and_exit(h):
        h.press('^\\')
        h.await_text('search (to replace):')
        h.press_and_enter(r'line_\d')
        h.await_text('replace with:')
        h.press_and_enter('ohai')
        h.await_text('replace [yes, no, all]?')
        h.press('?')
        h.press('^C')
        h.await_text('cancelled')
def test_replace_say_no_to_individual_replace(run, ten_lines):
    """'n' skips a match; only the 'y'-confirmed matches are replaced and
    the final count reflects that."""
    with run(str(ten_lines)) as h, and_exit(h):
        h.press('^\\')
        h.await_text('search (to replace):')
        h.press_and_enter('line_[135]')
        h.await_text('replace with:')
        h.press_and_enter('ohai')
        h.await_text('replace [yes, no, all]?')
        h.press('y')
        h.await_text_missing('line_1')
        h.press('n')
        h.await_text('line_3')
        h.press('y')
        h.await_text_missing('line_5')
        h.await_text('replaced 2 occurrences')
def test_replace_all(run, ten_lines):
    """'a' replaces every match at once; backreferences (\\1) are expanded."""
    with run(str(ten_lines)) as h, and_exit(h):
        h.press('^\\')
        h.await_text('search (to replace):')
        h.press_and_enter(r'line_(\d)')
        h.await_text('replace with:')
        h.press_and_enter(r'ohai+\1')
        h.await_text('replace [yes, no, all]?')
        h.press('a')
        h.await_text_missing('line')
        h.await_text('ohai+1')
        h.await_text('replaced 10 occurrences')
def test_replace_with_empty_string(run, ten_lines):
    """An empty replacement string deletes the matched text."""
    with run(str(ten_lines)) as h, and_exit(h):
        h.press('^\\')
        h.await_text('search (to replace):')
        h.press_and_enter('line_1')
        h.await_text('replace with:')
        h.press('Enter')
        h.await_text('replace [yes, no, all]?')
        h.press('y')
        h.await_text_missing('line_1')
def test_replace_search_not_found(run, ten_lines):
    """A pattern with no matches still prompts for a replacement, then
    reports 'no matches'."""
    with run(str(ten_lines)) as h, and_exit(h):
        h.press('^\\')
        h.await_text('search (to replace):')
        h.press_and_enter('wat')
        # TODO: would be nice to not prompt for a replace string in this case
        h.await_text('replace with:')
        h.press('Enter')
        h.await_text('no matches')
def test_replace_small_window_size(run, ten_lines):
    """The per-match prompt is truncated with an ellipsis when the
    terminal is too narrow to show it in full."""
    with run(str(ten_lines)) as h, and_exit(h):
        h.press('^\\')
        h.await_text('search (to replace):')
        h.press_and_enter('line')
        h.await_text('replace with:')
        h.press_and_enter('wat')
        h.await_text('replace [yes, no, all]?')
        with h.resize(width=8, height=24):
            h.await_text('replace…')
        h.press('^C')
def test_replace_height_1_highlight(run, tmpdir):
    """With a 1-row terminal the buffer text is hidden during the prompt
    and reappears when the window is restored."""
    f = tmpdir.join('f')
    f.write('x' * 90)
    with run(str(f)) as h, and_exit(h):
        h.press('^\\')
        h.await_text('search (to replace):')
        h.press_and_enter('^x+$')
        h.await_text('replace with:')
        h.press('Enter')
        h.await_text('replace [yes, no, all]?')
        with h.resize(width=80, height=1):
            h.await_text_missing('xxxxx')
        h.await_text('xxxxx')
        h.press('^C')
def test_replace_line_goes_off_screen(run):
    """Replace works on a line wider than the screen (shown with the '»'
    continuation marker) and redraws the shortened line afterwards."""
    with run() as h, and_exit(h):
        h.press(f'{"a" * 20}{"b" * 90}')
        h.press('^A')
        h.await_text(f'{"a" * 20}{"b" * 59}»')
        h.press('^\\')
        h.await_text('search (to replace):')
        h.press_and_enter('b+')
        h.await_text('replace with:')
        h.press_and_enter('wat')
        h.await_text('replace [yes, no, all]?')
        h.await_text(f'{"a" * 20}{"b" * 59}»')
        h.press('y')
        h.await_text(f'{"a" * 20}wat')
        h.await_text('replaced 1 occurrence')
def test_replace_undo_undoes_only_one(run, ten_lines):
    """Each confirmed replacement is its own undo step: M-u restores only
    the most recent one."""
    with run(str(ten_lines)) as h, and_exit(h):
        h.press('^\\')
        h.await_text('search (to replace):')
        h.press_and_enter('line')
        h.await_text('replace with:')
        h.press_and_enter('wat')
        h.press('y')
        h.await_text_missing('line_0')
        h.press('y')
        h.await_text_missing('line_1')
        h.press('^C')
        h.press('M-u')
        h.await_text('line_1')
        h.await_text_missing('line_0')
def test_replace_multiple_occurrences_in_line(run):
    """'a' replaces every match within a single line, not just the first."""
    with run() as h, and_exit(h):
        h.press('baaaaabaaaaa')
        h.press('^\\')
        h.await_text('search (to replace):')
        h.press_and_enter('a+')
        h.await_text('replace with:')
        h.press_and_enter('q')
        h.await_text('replace [yes, no, all]?')
        h.press('a')
        h.await_text('bqbq')
def test_replace_after_wrapping(run, ten_lines):
    """Search starts at the cursor and wraps to the top: with the cursor
    on line 2 the matches are visited as line_2 first, then line_0."""
    with run(str(ten_lines)) as h, and_exit(h):
        h.press('Down')
        h.press('^\\')
        h.await_text('search (to replace):')
        h.press_and_enter('line_[02]')
        h.await_text('replace with:')
        h.press_and_enter('ohai')
        h.await_text('replace [yes, no, all]?')
        h.press('y')
        h.await_text_missing('line_2')
        h.press('y')
        h.await_text_missing('line_0')
        h.await_text('replaced 2 occurrences')
def test_replace_after_cursor_after_wrapping(run):
    """After wrapping, a match before the original cursor position is
    still offered and can be replaced."""
    with run() as h, and_exit(h):
        h.press('baaab')
        h.press('Left')
        h.press('^\\')
        h.await_text('search (to replace):')
        h.press_and_enter('b')
        h.await_text('replace with:')
        h.press_and_enter('q')
        h.await_text('replace [yes, no, all]?')
        h.press('n')
        h.press('y')
        h.await_text('replaced 1 occurrence')
        h.await_text('qaaab')
def test_replace_separate_line_after_wrapping(run, ten_lines):
    """Matches on lines above the cursor are reached after wrapping and
    replaced independently."""
    with run(str(ten_lines)) as h, and_exit(h):
        h.press('Down')
        h.press('Down')
        h.press('^\\')
        h.await_text('search (to replace):')
        h.press_and_enter('line_[01]')
        h.await_text('replace with:')
        h.press_and_enter('_')
        h.await_text('replace [yes, no, all]?')
        h.press('y')
        h.await_text_missing('line_0')
        h.press('y')
        h.await_text_missing('line_1')
def test_replace_with_newline_characters(run, ten_lines):
    """A replacement containing \\n splits the matched line into multiple
    lines."""
    with run(str(ten_lines)) as h, and_exit(h):
        h.press('^\\')
        h.await_text('search (to replace):')
        h.press_and_enter('(line)_([01])')
        h.await_text('replace with:')
        h.press_and_enter(r'\1\n\2')
        h.await_text('replace [yes, no, all]?')
        h.press('a')
        h.await_text_missing('line_0')
        h.await_text_missing('line_1')
        h.await_text('line\n0\nline\n1\n')
def test_replace_with_multiple_newline_characters(run, ten_lines):
    """Multiple \\n sequences in the replacement (including a trailing
    one) produce the corresponding number of new lines."""
    with run(str(ten_lines)) as h, and_exit(h):
        h.press('^\\')
        h.await_text('search (to replace):')
        h.press_and_enter('(li)(ne)_(1)')
        h.await_text('replace with:')
        h.press_and_enter(r'\1\n\2\n\3\n')
        h.await_text('replace [yes, no, all]?')
        h.press('a')
        h.await_text_missing('line_1')
        h.await_text('li\nne\n1\n\nline_2')
| 30.585443
| 77
| 0.579307
| 1,445
| 9,665
| 3.620069
| 0.096886
| 0.118142
| 0.19308
| 0.099025
| 0.817817
| 0.800994
| 0.768496
| 0.745555
| 0.735232
| 0.680941
| 0
| 0.010173
| 0.247387
| 9,665
| 315
| 78
| 30.68254
| 0.708276
| 0.006932
| 0
| 0.679389
| 0
| 0
| 0.200813
| 0
| 0
| 0
| 0
| 0.003175
| 0
| 1
| 0.083969
| false
| 0
| 0.01145
| 0
| 0.09542
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
83fe1bda65fe336bf4cc8e0819ccd63072c07951
| 14,544
|
py
|
Python
|
scripts/automation/regression/stateless_tests/stl_ns_test.py
|
timgates42/trex-core
|
efe94752fcb2d0734c83d4877afe92a3dbf8eccd
|
[
"Apache-2.0"
] | 956
|
2015-06-24T15:04:55.000Z
|
2022-03-30T06:25:04.000Z
|
scripts/automation/regression/stateless_tests/stl_ns_test.py
|
angelyouyou/trex-core
|
fddf78584cae285d9298ef23f9f5c8725e16911e
|
[
"Apache-2.0"
] | 782
|
2015-09-20T15:19:00.000Z
|
2022-03-31T23:52:05.000Z
|
scripts/automation/regression/stateless_tests/stl_ns_test.py
|
angelyouyou/trex-core
|
fddf78584cae285d9298ef23f9f5c8725e16911e
|
[
"Apache-2.0"
] | 429
|
2015-06-27T19:34:21.000Z
|
2022-03-23T11:02:51.000Z
|
#!/usr/bin/python
from .stl_general_test import CStlGeneral_Test, CTRexScenario
from trex_stl_lib.api import *
from trex.common.stats.trex_ns import CNsStats
from trex.common.services.trex_service_icmp import ServiceICMP
import pprint
class STLNS_Test(CStlGeneral_Test):
"""Tests for NS function """
def setUp(self):
CStlGeneral_Test.setUp(self)
if self.is_vdev:
self.skip("We don't know what to expect with vdev.")
if not (self.is_linux_stack and self.is_loopback):
self.skip("We need linux stack and loopback for this tests to work")
print('')
self.stl_trex.reset()
self.stl_trex.set_service_mode()
self.stl_trex.namespace_remove_all()
def tearDown(self):
CStlGeneral_Test.tearDown(self)
self.stl_trex.namespace_remove_all()
self.stl_trex.set_service_mode(enabled = False)
def test_ns_add_remove(self):
c= self.stl_trex
port = CTRexScenario.ports_map['bi'][0][0]
print('Using port %s' % port)
c.namespace_remove_all()
# clear counters
cmds=NSCmds()
cmds.clear_counters()
c.set_namespace_start(port, cmds)
c.wait_for_async_results(port);
# add
cmds=NSCmds()
MAC="00:01:02:03:04:05"
cmds.add_node(MAC)
cmds.set_ipv4(MAC,"1.1.1.3","1.1.1.2")
cmds.set_ipv6(MAC,True)
c.set_namespace_start(port, cmds)
c.wait_for_async_results(port);
# get nodes
cmds=NSCmds()
cmds.get_nodes()
c.set_namespace_start(port, cmds)
r=c.wait_for_async_results(port);
macs=r[0]['result']['nodes']
print('MACs of nodes: %s' % macs)
if len(macs) != 1:
self.fail(' must be exactly one MAC')
if macs[0] != "00:01:02:03:04:05":
self.fail(' macs should include 00:01:02:03:04:05')
cmds=NSCmds()
cmds.counters_get_meta()
cmds.counters_get_values()
c.set_namespace_start(port, cmds)
r=c.wait_for_async_results(port);
ns_stat = CNsStats()
ns_stat.set_meta_values(r[0]['result']['data'], r[1]['result'][''])
cnt = ns_stat.get_values_stats()
print('Counters:')
pprint.pprint(cnt)
for k, v in cnt.items():
assert v < 1e6, 'Value is too big in counter %s=%s' % (k, v)
assert cnt['tx_multicast_pkts']>0, 'multicast rx counter is zero'
# remove Node
cmds=NSCmds()
cmds.remove_node(MAC)
c.set_namespace_start(port, cmds)
r=c.wait_for_async_results(port);
cmds=NSCmds()
cmds.get_nodes()
c.set_namespace_start(port, cmds)
r=c.wait_for_async_results(port);
macs=r[0]['result']['nodes']
print('MACs of nodes: %s' % macs)
if len(macs) != 0:
self.fail(' must be no MACs, we deleted node')
# clear counters
cmds=NSCmds()
cmds.clear_counters()
cmds.counters_get_meta()
cmds.counters_get_values()
c.set_namespace_start(port, cmds)
r=c.wait_for_async_results(port);
ns_stat = CNsStats()
ns_stat.set_meta_values(r[1]['result']['data'], r[2]['result'][''])
cnt = ns_stat.get_values_stats()
print('Counters:')
pprint.pprint(cnt)
assert len(cnt)==0, 'Counters should be zero'
def test_ping_to_ns(self):
# this test works on specific setup with specific configuration
if not CTRexScenario.setup_name in ('trex17'):
return
c = self.stl_trex
try:
c.set_port_attr(promiscuous = True, multicast = True)
cmds=NSCmds()
MAC="00:01:02:03:04:05"
cmds.add_node(MAC)
cmds.set_ipv4(MAC,"1.1.1.3","1.1.1.2")
cmds.set_ipv6(MAC,True)
c.set_namespace_start(0, cmds)
c.wait_for_async_results(0)
c.set_l3_mode_line('-p 1 --src 1.1.1.2 --dst 1.1.1.3')
r=c.ping_ip(1,'1.1.1.3')
assert len(r)==5, 'should be 5 responses '
assert r[0].state == ServiceICMP.PINGRecord.SUCCESS
finally:
c.set_l3_mode_line('-p 1 --src 1.1.1.2 --dst 1.1.1.1')
c.set_port_attr(promiscuous = False, multicast = False)
def test_ping_with_vlan(self):
c = self.stl_trex
try:
c.set_port_attr(promiscuous = True, multicast = True)
cmds=NSCmds()
MAC = "00:01:02:03:04:05"
cmds.add_node(MAC)
cmds.set_ipv4(MAC,"1.1.1.3","1.1.1.2")
cmds.set_ipv6(MAC,True)
cmds.set_vlan(MAC, [21], [0x8100])
mac2 = "00:01:02:03:04:06"
cmds.add_node(mac2)
cmds.set_ipv4(mac2,"1.1.1.4","1.1.1.2")
cmds.set_ipv6(mac2,True)
c.set_namespace_start(0, cmds)
c.wait_for_async_results(0)
c.set_l3_mode_line('-p 1 --src 1.1.1.2 --dst 1.1.1.4')
r = c.ping_ip(1,'1.1.1.4')
assert len(r) == 5, 'should be 5 responses '
assert r[0].state == ServiceICMP.PINGRecord.SUCCESS
finally:
c.set_l3_mode_line('-p 1 --src 1.1.1.2 --dst 1.1.1.1')
c.set_port_attr(promiscuous = False, multicast = False)
def test_many_ns(self):
def get_mac (prefix,index):
mac="{}:{:02x}:{:02x}".format(prefix,(index>>8)&0xff,(index&0xff))
return (mac)
def get_ipv4 (prefix,index):
ipv4="{}.{:d}.{:d}".format(prefix,(index>>8)&0xff,(index&0xff))
return(ipv4)
def build_network (size):
cmds=NSCmds()
MAC_PREFIX="00:01:02:03"
IPV4_PREFIX="1.1"
IPV4_DG ='1.1.1.2'
for i in range(size):
mac = get_mac (MAC_PREFIX,i+257+1)
ipv4 = get_ipv4 (IPV4_PREFIX,259+i)
cmds.add_node(mac)
cmds.set_ipv4(mac,ipv4,IPV4_DG)
cmds.set_ipv6(mac,True)
return (cmds)
c = self.stl_trex
try:
c.namespace_remove_all()
cmds = build_network (100)
c.set_namespace_start(0, cmds)
c.wait_for_async_results(0)
cmds=NSCmds()
cmds.get_nodes()
c.set_namespace_start(0, cmds)
r=c.wait_for_async_results(0);
macs=r[0]['result']['nodes']
print(macs)
assert len(macs) == 100, 'number of namespace is not correct '
finally:
c.namespace_remove_all()
#####################
# Shared ns Tests #
#####################
def _create_shared_ns(self, port):
r = self.stl_trex.set_namespace(port, method = "add_shared_ns")
return str(r['result'])
def test_shared_ns_add_remove(self):
c = self.stl_trex
port = CTRexScenario.ports_map['bi'][0][0]
print('Using port %s' % port)
c.namespace_remove_all()
# clear counters
cmds = NSCmds()
cmds.clear_counters()
c.set_namespace_start(port, cmds)
c.wait_for_async_results(port)
# add shared ns
ns_name = self._create_shared_ns(port)
# add veth to ns
cmds = NSCmds()
MAC = "00:01:02:03:04:05"
cmds.add_node(MAC, shared_ns = ns_name)
cmds.set_ipv4(MAC, ipv4 = "1.1.1.3", subnet = 24, shared_ns = True)
cmds.set_ipv6(MAC, enable = True, shared_ns = True)
cmds.set_vlan(MAC, vlans = [22], tpids = [0x8011])
c.set_namespace_start(port, cmds)
c.wait_for_async_results(port)
# get nodes
cmds = NSCmds()
cmds.get_nodes()
c.set_namespace_start(port, cmds)
r = c.wait_for_async_results(port)
macs = r[0]['result']['nodes']
print('MACs of nodes: %s' % macs)
if len(macs) != 1:
self.fail(' must be exactly one MAC')
if macs[0] != "00:01:02:03:04:05":
self.fail(' macs should include 00:01:02:03:04:05')
cmds = NSCmds()
cmds.counters_get_meta()
cmds.counters_get_values()
c.set_namespace_start(port, cmds)
r = c.wait_for_async_results(port)
ns_stat = CNsStats()
ns_stat.set_meta_values(r[0]['result']['data'], r[1]['result'][''])
cnt = ns_stat.get_values_stats()
print('Counters:')
pprint.pprint(cnt)
for k, v in cnt.items():
assert v < 1e6, 'Value is too big in counter %s=%s' % (k, v)
assert cnt['tx_multicast_pkts']>0, 'multicast rx counter is zero'
# remove Node
cmds = NSCmds()
cmds.remove_node(MAC)
c.set_namespace_start(port, cmds)
r = c.wait_for_async_results(port)
cmds = NSCmds()
cmds.get_nodes()
c.set_namespace_start(port, cmds)
r = c.wait_for_async_results(port)
macs = r[0]['result']['nodes']
print('MACs of nodes: %s' % macs)
if len(macs) != 0:
self.fail(' must be no MACs, we deleted node')
# clear counters
cmds = NSCmds()
cmds.clear_counters()
cmds.counters_get_meta()
cmds.counters_get_values()
c.set_namespace_start(port, cmds)
r = c.wait_for_async_results(port)
ns_stat = CNsStats()
ns_stat.set_meta_values(r[1]['result']['data'], r[2]['result'][''])
cnt = ns_stat.get_values_stats()
print('Counters:')
pprint.pprint(cnt)
assert len(cnt) == 0, 'Counters should be zero'
def test_many_shared_ns(self):
def get_mac (prefix, index):
mac = "{}:{:02x}:{:02x}".format(prefix, (index>>8) & 0xff,(index & 0xff))
return mac
def get_ipv4 (prefix, index):
ipv4 = "{}.{:d}.{:d}".format(prefix, (index >> 8) & 0xff,(index & 0xff))
return ipv4
def build_network (size, ns_name):
cmds = NSCmds()
MAC_PREFIX = "00:01:02:03"
IPV4_PREFIX = "1.1"
IPV4_DG = '1.1.1.2'
ipv4_subnet = 24
for i in range(size):
mac = get_mac(MAC_PREFIX,i+257+1)
ipv4 = get_ipv4 (IPV4_PREFIX,259+i)
cmds.add_node(mac, shared_ns = ns_name)
cmds.set_ipv4(mac, ipv4 = ipv4, subnet = ipv4_subnet, shared_ns = True)
cmds.set_ipv6(mac, enable = True, shared_ns = True)
return cmds
try:
c = self.stl_trex
c.namespace_remove_all()
ns_name = self._create_shared_ns(port = 0)
cmds = build_network (100, ns_name = ns_name)
c.set_namespace_start(0, cmds)
c.wait_for_async_results(0)
cmds = NSCmds()
cmds.get_nodes()
c.set_namespace_start(0, cmds)
r = c.wait_for_async_results(0)
macs = r[0]['result']['nodes']
print(macs)
assert len(macs) == 100, 'number of namespace is not correct'
finally:
c.namespace_remove_all()
def test_ping_to_shared_ns(self):
# this test works on specific setup with specific configuration
if not CTRexScenario.setup_name in ('trex17'):
return
c = self.stl_trex
try:
c.set_port_attr(promiscuous = True, multicast = True)
c.set_namespace(0, method = 'remove_all')
ns_name = self._create_shared_ns(port = 0)
cmds = NSCmds()
MAC = "00:01:02:03:04:05"
cmds.add_node(MAC, shared_ns = ns_name)
cmds.set_ipv4(MAC, ipv4 = "1.1.1.3", subnet = 24, shared_ns = True)
cmds.set_dg(shared_ns = ns_name, dg = "1.1.1.2")
cmds.set_ipv6(MAC,enable = True, shared_ns = True)
c.set_namespace_start(0, cmds)
c.wait_for_async_results(0)
c.set_l3_mode_line('-p 1 --src 1.1.1.2 --dst 1.1.1.3')
r = c.ping_ip(1, '1.1.1.3')
assert len(r) == 5, 'should be 5 responses '
assert r[0].state == ServiceICMP.PINGRecord.SUCCESS
finally:
c.set_l3_mode_line('-p 1 --src 1.1.1.2 --dst 1.1.1.1')
c.set_port_attr(promiscuous = False, multicast = False)
def test_get_shared_ns_node_info(self):
c = self.stl_trex
MAC = "00:01:02:03:04:05"
try:
c.namespace_remove_all()
ns_name = self._create_shared_ns(port = 0)
cmds = NSCmds()
cmds.add_node(MAC, shared_ns = ns_name)
cmds.set_ipv4(MAC, ipv4 = "1.1.1.3", subnet = 24, shared_ns = True)
cmds.set_ipv6(MAC, enable = True, shared_ns = True)
cmds.set_vlan(MAC, vlans = [22], tpids = [0x8100])
c.set_namespace_start(0, cmds)
c.wait_for_async_results(0)
res = c.set_namespace(0, method = "get_nodes_info", macs_list = [MAC])
nodes = res['result']['nodes']
assert(len(nodes) == 1)
node_info = nodes[0]
assert(node_info['ether']['src'] == MAC)
assert(node_info['ipv4']['src'] == "1.1.1.3")
assert(node_info['ipv4']['subnet'] == 24)
assert(node_info['ipv6']['enabled'] == True)
assert(node_info['vlan']['tags'] == [22])
assert(node_info['vlan']['tpids'] == [0x8100])
finally:
c.namespace_remove_all()
def test_setting_shared_ns_vlans(self):
    """Re-apply several VLAN tag stacks to one shared-namespace node and
    verify get_nodes_info reflects each update."""
    client = self.stl_trex
    try:
        client.namespace_remove_all()
        ns = self._create_shared_ns(port = 0)
        mac = "00:01:02:03:04:05"
        client.set_namespace(0, method = "add_node" ,mac = mac, shared_ns = ns)

        # (tags, tpids) pairs to apply in order; the last case repeats
        # the previous one to check idempotent re-application.
        cases = [
            ([22], [0x8100]),
            ([22, 23], [0x8100, 0x8100]),
            ([22, 23], [0x8100, 0x8100]),
        ]
        for tags, tpids in cases:
            batch = NSCmds()
            batch.set_vlan(mac, tags, tpids)
            batch.get_nodes_info([mac])
            client.set_namespace_start(0, batch)
            # Result index 1 corresponds to the get_nodes_info command.
            nodes = client.wait_for_async_results(0)[1]['result']['nodes']
            assert(len(nodes) == 1)
            vlan = nodes[0]['vlan']
            assert(vlan['tags'] == tags)
            assert(vlan['tpids'] == tpids)
    finally:
        client.namespace_remove_all()
| 33.205479
| 87
| 0.547649
| 2,015
| 14,544
| 3.738462
| 0.096774
| 0.018054
| 0.014337
| 0.054958
| 0.842692
| 0.806717
| 0.784946
| 0.768884
| 0.764901
| 0.754547
| 0
| 0.054688
| 0.317313
| 14,544
| 437
| 88
| 33.281465
| 0.703998
| 0.022071
| 0
| 0.765766
| 0
| 0.018018
| 0.11385
| 0
| 0
| 0
| 0.006074
| 0
| 0.072072
| 1
| 0.054054
| false
| 0
| 0.015015
| 0
| 0.096096
| 0.054054
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
790d18c000cbd34272ce5e58feb3eb2b358ab314
| 223
|
py
|
Python
|
models/layer/__init__.py
|
LegenDong/IQIYI_VID_FACE_2019
|
258ff9282206e7b7074ed9ada5ef928bc9305ec6
|
[
"MIT"
] | 17
|
2019-07-11T02:41:01.000Z
|
2022-01-13T05:13:24.000Z
|
models/layer/__init__.py
|
xmpy/IQIYI_VID_FACE_2019
|
258ff9282206e7b7074ed9ada5ef928bc9305ec6
|
[
"MIT"
] | 1
|
2021-04-16T15:37:12.000Z
|
2021-04-17T13:46:57.000Z
|
models/layer/__init__.py
|
LegenDong/IQIYI_VID_FACE_2019
|
258ff9282206e7b7074ed9ada5ef928bc9305ec6
|
[
"MIT"
] | 5
|
2019-07-23T02:18:04.000Z
|
2021-07-14T03:42:32.000Z
|
# -*- coding: utf-8 -*-
# @Time : 2019/5/11 15:12
# @Author : LegenDong
# @User : legendong
# @File : __init__.py.py
# @Software: PyCharm
from .channel_attention_layer import *
from .nan_attention_layer import *
| 22.3
| 38
| 0.654709
| 29
| 223
| 4.758621
| 0.793103
| 0.202899
| 0.289855
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067039
| 0.197309
| 223
| 9
| 39
| 24.777778
| 0.703911
| 0.605381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f71b33566c8e0a884e4d1704ac06c8583ef46398
| 7,939
|
py
|
Python
|
tests/test_storage.py
|
gregdan3/limits
|
f2c693b9009afe27c9ecbb94492455ad470127f1
|
[
"MIT"
] | null | null | null |
tests/test_storage.py
|
gregdan3/limits
|
f2c693b9009afe27c9ecbb94492455ad470127f1
|
[
"MIT"
] | null | null | null |
tests/test_storage.py
|
gregdan3/limits
|
f2c693b9009afe27c9ecbb94492455ad470127f1
|
[
"MIT"
] | null | null | null |
import time
import pytest
from limits.errors import ConfigurationError
from limits.storage import (
MemcachedStorage,
MemoryStorage,
MongoDBStorage,
RedisClusterStorage,
RedisSentinelStorage,
RedisStorage,
Storage,
storage_from_string,
)
from limits.strategies import MovingWindowRateLimiter
class TestBaseStorage:
    """Tests for constructing rate-limit storage backends from URI strings.

    Each parametrized case supplies a storage URI, extra keyword args, and
    (where a live backend is needed) a lazy fixture plus a pytest mark so
    backend-specific cases can be selected/skipped as a group.
    """

    @pytest.mark.parametrize(
        "uri, args, expected_instance, fixture",
        [
            ("memory://", {}, MemoryStorage, None),
            pytest.param(
                "redis://localhost:7379",
                {},
                RedisStorage,
                pytest.lazy_fixture("redis_basic"),
                marks=pytest.mark.redis,
            ),
            pytest.param(
                "redis+unix:///tmp/limits.redis.sock",
                {},
                RedisStorage,
                pytest.lazy_fixture("redis_uds"),
                marks=pytest.mark.redis,
            ),
            pytest.param(
                "redis+unix://:password/tmp/limits.redis.sock",
                {},
                RedisStorage,
                pytest.lazy_fixture("redis_uds"),
                marks=pytest.mark.redis,
            ),
            pytest.param(
                "memcached://localhost:22122",
                {},
                MemcachedStorage,
                pytest.lazy_fixture("memcached"),
                marks=pytest.mark.memcached,
            ),
            pytest.param(
                "memcached://localhost:22122,localhost:22123",
                {},
                MemcachedStorage,
                pytest.lazy_fixture("memcached_cluster"),
                marks=pytest.mark.memcached,
            ),
            pytest.param(
                "memcached:///tmp/limits.memcached.sock",
                {},
                MemcachedStorage,
                pytest.lazy_fixture("memcached_uds"),
                marks=pytest.mark.memcached,
            ),
            pytest.param(
                "redis+sentinel://localhost:26379",
                {"service_name": "localhost-redis-sentinel"},
                RedisSentinelStorage,
                pytest.lazy_fixture("redis_sentinel"),
                marks=pytest.mark.redis_sentinel,
            ),
            pytest.param(
                "redis+sentinel://localhost:26379/localhost-redis-sentinel",
                {},
                RedisSentinelStorage,
                pytest.lazy_fixture("redis_sentinel"),
                marks=pytest.mark.redis_sentinel,
            ),
            pytest.param(
                "redis+sentinel://:sekret@localhost:26379/localhost-redis-sentinel",
                {},
                RedisSentinelStorage,
                pytest.lazy_fixture("redis_sentinel_auth"),
                marks=pytest.mark.redis_sentinel,
            ),
            pytest.param(
                "redis+cluster://localhost:7001/",
                {},
                RedisClusterStorage,
                pytest.lazy_fixture("redis_cluster"),
                marks=pytest.mark.redis_cluster,
            ),
            pytest.param(
                "mongodb://localhost:37017/",
                {},
                MongoDBStorage,
                pytest.lazy_fixture("mongodb"),
                marks=pytest.mark.mongodb,
            ),
        ],
    )
    def test_storage_string(self, uri, args, expected_instance, fixture):
        """The URI scheme selects the matching Storage subclass."""
        assert isinstance(storage_from_string(uri, **args), expected_instance)

    @pytest.mark.parametrize(
        "uri, args", [("blah://", {}), ("redis+sentinel://localhost:26379", {})]
    )
    def test_invalid_storage_string(self, uri, args):
        """An unknown scheme, or a sentinel URI missing its service name,
        raises ConfigurationError."""
        with pytest.raises(ConfigurationError):
            storage_from_string(uri, **args)

    @pytest.mark.parametrize(
        "uri, args, fixture",
        [
            ("memory://", {}, None),
            pytest.param(
                "redis://localhost:7379",
                {},
                pytest.lazy_fixture("redis_basic"),
                marks=pytest.mark.redis,
            ),
            pytest.param(
                "redis+unix:///tmp/limits.redis.sock",
                {},
                pytest.lazy_fixture("redis_uds"),
                marks=pytest.mark.redis,
            ),
            pytest.param(
                "redis+unix://:password/tmp/limits.redis.sock",
                {},
                pytest.lazy_fixture("redis_uds"),
                marks=pytest.mark.redis,
            ),
            pytest.param(
                "memcached://localhost:22122",
                {},
                pytest.lazy_fixture("memcached"),
                marks=pytest.mark.memcached,
            ),
            pytest.param(
                "memcached://localhost:22122,localhost:22123",
                {},
                pytest.lazy_fixture("memcached_cluster"),
                marks=pytest.mark.memcached,
            ),
            pytest.param(
                "memcached:///tmp/limits.memcached.sock",
                {},
                pytest.lazy_fixture("memcached_uds"),
                marks=pytest.mark.memcached,
            ),
            pytest.param(
                "redis+sentinel://localhost:26379",
                {"service_name": "localhost-redis-sentinel"},
                pytest.lazy_fixture("redis_sentinel"),
                marks=pytest.mark.redis_sentinel,
            ),
            pytest.param(
                "redis+sentinel://localhost:26379/localhost-redis-sentinel",
                {},
                pytest.lazy_fixture("redis_sentinel"),
                marks=pytest.mark.redis_sentinel,
            ),
            # NOTE(review): port 36379 differs from the 26379 used by the
            # equivalent case in test_storage_string above - confirm intentional.
            pytest.param(
                "redis+sentinel://:sekret@localhost:36379/localhost-redis-sentinel",
                {},
                pytest.lazy_fixture("redis_sentinel_auth"),
                marks=pytest.mark.redis_sentinel,
            ),
            pytest.param(
                "redis+cluster://localhost:7001/",
                {},
                pytest.lazy_fixture("redis_cluster"),
                marks=pytest.mark.redis_cluster,
            ),
            pytest.param(
                "mongodb://localhost:37017/",
                {},
                pytest.lazy_fixture("mongodb"),
                marks=pytest.mark.mongodb,
            ),
        ],
    )
    def test_storage_check(self, uri, args, fixture):
        """Each constructed backend answers a health check."""
        assert storage_from_string(uri, **args).check()

    def test_pluggable_storage_no_moving_window(self):
        """A custom Storage without the moving-window hooks is registered
        by scheme but rejected by MovingWindowRateLimiter."""
        class MyStorage(Storage):
            STORAGE_SCHEME = ["mystorage"]

            def incr(self, key, expiry, elastic_expiry=False):
                return

            def get(self, key):
                return 0

            def get_expiry(self, key):
                return time.time()

            def reset(self):
                return

            def check(self):
                return

            def clear(self):
                return

        storage = storage_from_string("mystorage://")
        assert isinstance(storage, MyStorage)
        with pytest.raises(NotImplementedError):
            MovingWindowRateLimiter(storage)

    def test_pluggable_storage_moving_window(self):
        """A custom Storage that also implements acquire_entry and
        get_moving_window is accepted by MovingWindowRateLimiter."""
        class MyStorage(Storage):
            STORAGE_SCHEME = ["mystorage"]

            def incr(self, key, expiry, elastic_expiry=False):
                return

            def get(self, key):
                return 0

            def get_expiry(self, key):
                return time.time()

            def reset(self):
                return

            def check(self):
                return

            def clear(self):
                return

            def acquire_entry(self, *a, **k):
                return True

            def get_moving_window(self, *a, **k):
                return (time.time(), 1)

        storage = storage_from_string("mystorage://")
        assert isinstance(storage, MyStorage)
        MovingWindowRateLimiter(storage)
| 32.272358
| 84
| 0.493513
| 631
| 7,939
| 6.063391
| 0.137876
| 0.065342
| 0.097752
| 0.080502
| 0.817041
| 0.737846
| 0.720596
| 0.720596
| 0.714846
| 0.680868
| 0
| 0.019592
| 0.395642
| 7,939
| 245
| 85
| 32.404082
| 0.777824
| 0
| 0
| 0.837104
| 0
| 0
| 0.169417
| 0.115884
| 0
| 0
| 0
| 0
| 0.0181
| 1
| 0.085973
| false
| 0.00905
| 0.022624
| 0.063348
| 0.18552
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f74586a9a2e957cdc6a027edd7f741dc21f08c31
| 95
|
py
|
Python
|
helli5/contextprocessor.py
|
TheMn/internet-engineering-project
|
e41536552feff6f806ba099922df95e89da5bd31
|
[
"Apache-2.0"
] | 7
|
2019-10-19T12:58:11.000Z
|
2020-11-05T07:02:17.000Z
|
helli5/contextprocessor.py
|
TheMn/internet-engineering-project
|
e41536552feff6f806ba099922df95e89da5bd31
|
[
"Apache-2.0"
] | 35
|
2019-12-06T16:31:07.000Z
|
2022-03-12T00:56:35.000Z
|
helli5/contextprocessor.py
|
TheMn/internet-engineering-project
|
e41536552feff6f806ba099922df95e89da5bd31
|
[
"Apache-2.0"
] | 1
|
2019-10-18T19:07:04.000Z
|
2019-10-18T19:07:04.000Z
|
from datetime import datetime
def time_now(request):
    """Context processor exposing the current local time as 'time_now'.

    The *request* argument is required by the context-processor calling
    convention but is not used.
    """
    return dict(time_now=datetime.now())
| 15.833333
| 39
| 0.726316
| 13
| 95
| 5.153846
| 0.615385
| 0.208955
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157895
| 95
| 5
| 40
| 19
| 0.8375
| 0
| 0
| 0
| 0
| 0
| 0.084211
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
f746211f667f9006fea8f6ab5626f3b922ce6c61
| 19,004
|
py
|
Python
|
SimModel_Python_API/simmodel_swig/Release/SimSpace_Occupied_Default.py
|
EnEff-BIM/EnEffBIM-Framework
|
6328d39b498dc4065a60b5cc9370b8c2a9a1cddf
|
[
"MIT"
] | 3
|
2016-05-30T15:12:16.000Z
|
2022-03-22T08:11:13.000Z
|
SimModel_Python_API/simmodel_swig/Release/SimSpace_Occupied_Default.py
|
EnEff-BIM/EnEffBIM-Framework
|
6328d39b498dc4065a60b5cc9370b8c2a9a1cddf
|
[
"MIT"
] | 21
|
2016-06-13T11:33:45.000Z
|
2017-05-23T09:46:52.000Z
|
SimModel_Python_API/simmodel_swig/Release/SimSpace_Occupied_Default.py
|
EnEff-BIM/EnEffBIM-Framework
|
6328d39b498dc4065a60b5cc9370b8c2a9a1cddf
|
[
"MIT"
] | null | null | null |
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.7
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info
if version_info >= (2, 6, 0):
    # Python >= 2.6: look for the compiled extension next to this file
    # first, so the wrapper and its _module stay paired.
    def swig_import_helper():
        from os.path import dirname
        import imp
        fp = None
        try:
            fp, pathname, description = imp.find_module('_SimSpace_Occupied_Default', [dirname(__file__)])
        except ImportError:
            # Not alongside this file; fall back to the normal sys.path import.
            import _SimSpace_Occupied_Default
            return _SimSpace_Occupied_Default
        if fp is not None:
            try:
                _mod = imp.load_module('_SimSpace_Occupied_Default', fp, pathname, description)
            finally:
                # Always close the file handle returned by find_module.
                fp.close()
            return _mod
    _SimSpace_Occupied_Default = swig_import_helper()
    del swig_import_helper
else:
    # Older Pythons: plain import only.
    import _SimSpace_Occupied_Default
del version_info
# Alias the builtin 'property' if it exists (absent before Python 2.2).
try:
    _swig_property = property
except NameError:
    pass  # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
    """SWIG attribute setter used by the generated proxy classes.

    Routes writes through the class's __swig_setmethods__ table so they
    reach the underlying C object; with static=1 (the default), writes to
    names not in the table are rejected.
    """
    # 'thisown' controls ownership of the wrapped C object.
    if (name == "thisown"):
        return self.this.own(value)
    if (name == "this"):
        # Only accept a real SWIG pointer object for 'this'.
        if type(value).__name__ == 'SwigPyObject':
            self.__dict__[name] = value
            return
    method = class_type.__swig_setmethods__.get(name, None)
    if method:
        return method(self, value)
    if (not static):
        # Dynamic mode: allow arbitrary new Python-side attributes.
        if _newclass:
            object.__setattr__(self, name, value)
        else:
            self.__dict__[name] = value
    else:
        raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
    """Non-static variant of _swig_setattr_nondynamic: also permits
    adding new Python-side attributes."""
    return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr_nondynamic(self, class_type, name, static=1):
    """SWIG attribute getter: resolve *name* via the class's
    __swig_getmethods__ table, reaching into the wrapped C object."""
    if (name == "thisown"):
        # Ownership flag of the wrapped C object.
        return self.this.own()
    method = class_type.__swig_getmethods__.get(name, None)
    if method:
        return method(self)
    if (not static):
        # Dynamic mode: fall back to normal Python attribute lookup.
        return object.__getattr__(self, name)
    else:
        raise AttributeError(name)
def _swig_getattr(self, class_type, name):
    """Non-static variant of _swig_getattr_nondynamic (allows fallback to
    plain Python attribute lookup)."""
    return _swig_getattr_nondynamic(self, class_type, name, 0)
def _swig_repr(self):
try:
strthis = "proxy of " + self.this.__repr__()
except:
strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
# Detect new-style class support (always present on Python >= 2.2).
try:
    _object = object
    _newclass = 1
except AttributeError:
    # Ancient Python without 'object': use a classic class as the base.
    class _object:
        pass
    _newclass = 0
# weakref may be absent on minimal builds; degrade to the identity function.
try:
    import weakref
    weakref_proxy = weakref.proxy
except:
    weakref_proxy = lambda x: x
import base
class SimSpace(base.SimSpatialStructureElement):
    """SWIG proxy for the SimModel 'SimSpace' C++ class.

    Every method forwards to the compiled extension module
    _SimSpace_Occupied_Default; the table/lambda lines below are the
    standard SWIG classic-class attribute-dispatch machinery.
    """
    # Merge inherited setter/getter tables, then install dispatching hooks.
    __swig_setmethods__ = {}
    for _s in [base.SimSpatialStructureElement]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimSpace, name, value)
    __swig_getmethods__ = {}
    for _s in [base.SimSpatialStructureElement]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, SimSpace, name)
    __repr__ = _swig_repr

    # Schema-generated accessors; each forwards to the extension module.
    def SpaceZoneAssignments(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_SpaceZoneAssignments(self, *args)
    def SpaceNumber(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_SpaceNumber(self, *args)
    def SpaceName(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_SpaceName(self, *args)
    def SpaceInteriorOrExterior(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_SpaceInteriorOrExterior(self, *args)
    def SpaceDatumElevation(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_SpaceDatumElevation(self, *args)
    def SpaceThermalSimulationType(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_SpaceThermalSimulationType(self, *args)
    def SpaceConditioningRequirement(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_SpaceConditioningRequirement(self, *args)
    def SpaceOccupantDensity(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_SpaceOccupantDensity(self, *args)
    def SpaceOccupantHeatRateLatent(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_SpaceOccupantHeatRateLatent(self, *args)
    def SpaceOccupantHeatRateSensible(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_SpaceOccupantHeatRateSensible(self, *args)
    def SpaceOccupantLoad(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_SpaceOccupantLoad(self, *args)
    def SpaceEquipmentLoad(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_SpaceEquipmentLoad(self, *args)
    def SpaceLightingLoad(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_SpaceLightingLoad(self, *args)
    def InsideDryBulbTempHeating(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_InsideDryBulbTempHeating(self, *args)
    def InsideRelHumidityHeating(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_InsideRelHumidityHeating(self, *args)
    def InsideDryBulbTempCooling(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_InsideDryBulbTempCooling(self, *args)
    def InsideRelHumidityCooling(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_InsideRelHumidityCooling(self, *args)
    def IncludesReturnAirPlenum(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_IncludesReturnAirPlenum(self, *args)
    def PeakAirFlowCooling(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_PeakAirFlowCooling(self, *args)
    def PeakAirFlowHeating(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_PeakAirFlowHeating(self, *args)
    def ExhaustAirFlowRate(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_ExhaustAirFlowRate(self, *args)
    def NaturalAirChangeRate(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_NaturalAirChangeRate(self, *args)
    def MechanicalAirChangeRate(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_MechanicalAirChangeRate(self, *args)
    def VentilationType(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_VentilationType(self, *args)
    def OutsideAirPerPerson(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_OutsideAirPerPerson(self, *args)
    def SpaceHeight(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_SpaceHeight(self, *args)
    def SpaceGrossPerimeter(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_SpaceGrossPerimeter(self, *args)
    def SpaceGrossFloorArea(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_SpaceGrossFloorArea(self, *args)
    def SpaceNetFloorArea(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_SpaceNetFloorArea(self, *args)
    def SpaceGrossVolume(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_SpaceGrossVolume(self, *args)
    def SpaceNetVolume(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_SpaceNetVolume(self, *args)
    def SpaceNetFloorAreaBOMA(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_SpaceNetFloorAreaBOMA(self, *args)
    def SpaceUsableFloorAreaBOMA(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_SpaceUsableFloorAreaBOMA(self, *args)
    def ClassRef_SpaceByFunction(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_ClassRef_SpaceByFunction(self, *args)
    def ClassRef_SpaceTypeOwner(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_ClassRef_SpaceTypeOwner(self, *args)
    def ClassRef_SpaceCategoryOwner(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_ClassRef_SpaceCategoryOwner(self, *args)
    def ClassRef_SpaceCategoryBOMA(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_ClassRef_SpaceCategoryBOMA(self, *args)
    def SpaceOccupants(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_SpaceOccupants(self, *args)
    def OccupancyScheduleAssignment(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_OccupancyScheduleAssignment(self, *args)
    def LightingScheduleAssignment(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_LightingScheduleAssignment(self, *args)
    def EquipmentScheduleAssignment(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_EquipmentScheduleAssignment(self, *args)
    def GeometricRepresentations(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_GeometricRepresentations(self, *args)
    def SpaceInSpatialContainer(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_SpaceInSpatialContainer(self, *args)
    def AssociatedPlena(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_AssociatedPlena(self, *args)
    def AssociatedElements(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_AssociatedElements(self, *args)

    def __init__(self, *args):
        # Create the underlying C++ object and attach it as 'this'.
        this = _SimSpace_Occupied_Default.new_SimSpace(*args)
        try:
            self.this.append(this)
        except:
            self.this = this
    def _clone(self, f=0, c=None):
        return _SimSpace_Occupied_Default.SimSpace__clone(self, f, c)
    __swig_destroy__ = _SimSpace_Occupied_Default.delete_SimSpace
    __del__ = lambda self: None
# Register the proxy class with the extension module.
SimSpace_swigregister = _SimSpace_Occupied_Default.SimSpace_swigregister
SimSpace_swigregister(SimSpace)
class SimSpace_Occupied(SimSpace):
    """SWIG proxy for 'SimSpace_Occupied': a SimSpace extended with
    Title-24 (T24) compliance attributes. All methods forward to the
    compiled extension module."""
    # Merge inherited setter/getter tables, then install dispatching hooks.
    __swig_setmethods__ = {}
    for _s in [SimSpace]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimSpace_Occupied, name, value)
    __swig_getmethods__ = {}
    for _s in [SimSpace]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, SimSpace_Occupied, name)
    __repr__ = _swig_repr

    # Title-24 attribute accessors generated from the schema.
    def T24CommRefrigEPD(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24CommRefrigEPD(self, *args)
    def T24CommRefrigEqmtSchedRef(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24CommRefrigEqmtSchedRef(self, *args)
    def T24CommRefrigLatentFraction(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24CommRefrigLatentFraction(self, *args)
    def T24CommRefrigLostFraction(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24CommRefrigLostFraction(self, *args)
    def T24CommRefrigRadFraction(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24CommRefrigRadFraction(self, *args)
    def T24EnvelopeStatus(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24EnvelopeStatus(self, *args)
    def T24ExhaustAirChangesPerHour(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24ExhaustAirChangesPerHour(self, *args)
    def T24ExhaustPerArea(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24ExhaustPerArea(self, *args)
    def T24ExhaustPerSpace(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24ExhaustPerSpace(self, *args)
    def T24HasProcessExhaust(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24HasProcessExhaust(self, *args)
    def T24IntLightingSpecMethod(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24IntLightingSpecMethod(self, *args)
    def T24KitchExhHoodDutyList(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24KitchExhHoodDutyList(self, *args)
    def T24KitchExhHoodFlowList(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24KitchExhHoodFlowList(self, *args)
    def T24KitchExhHoodLengthList(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24KitchExhHoodLengthList(self, *args)
    def T24KitchExhHoodStyleList(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24KitchExhHoodStyleList(self, *args)
    def T24LabExhRateType(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24LabExhRateType(self, *args)
    def T24LightingStatus(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24LightingStatus(self, *args)
    def T24MandLightCntrlCntRpt(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24MandLightCntrlCntRpt(self, *args)
    def T24MandLightCntrlDescRpt(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24MandLightCntrlDescRpt(self, *args)
    def T24MandLightCntrlAccepReqRpt(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24MandLightCntrlAccepReqRpt(self, *args)
    def T24MandLightCntrlIsAutoShOffCntrlRpt(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24MandLightCntrlIsAutoShOffCntrlRpt(self, *args)
    def T24MandLightCntrlIsDayltngCntrlRpt(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24MandLightCntrlIsDayltngCntrlRpt(self, *args)
    def T24MandLightCntrlIsDmndRespCntrlRpt(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24MandLightCntrlIsDmndRespCntrlRpt(self, *args)
    def T24MandLightCntrlIsManAreaCntrlRpt(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24MandLightCntrlIsManAreaCntrlRpt(self, *args)
    def T24MandLightCntrlIsMultLvlCntrlRpt(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24MandLightCntrlIsMultLvlCntrlRpt(self, *args)
    def T24SkylightRqmtExcep(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24SkylightRqmtExcep(self, *args)
    def T24SpaceFunction(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24SpaceFunction(self, *args)
    def T24ConstructStatus3(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24ConstructStatus3(self, *args)

    def __init__(self, *args):
        # Create the underlying C++ object and attach it as 'this'.
        this = _SimSpace_Occupied_Default.new_SimSpace_Occupied(*args)
        try:
            self.this.append(this)
        except:
            self.this = this
    def _clone(self, f=0, c=None):
        return _SimSpace_Occupied_Default.SimSpace_Occupied__clone(self, f, c)
    __swig_destroy__ = _SimSpace_Occupied_Default.delete_SimSpace_Occupied
    __del__ = lambda self: None
# Register the proxy class with the extension module.
SimSpace_Occupied_swigregister = _SimSpace_Occupied_Default.SimSpace_Occupied_swigregister
SimSpace_Occupied_swigregister(SimSpace_Occupied)
class SimSpace_Occupied_Default(SimSpace_Occupied):
    """SWIG proxy for 'SimSpace_Occupied_Default' (the concrete leaf class
    of this module); adds no attributes beyond SimSpace_Occupied."""
    # Merge inherited setter/getter tables, then install dispatching hooks.
    __swig_setmethods__ = {}
    for _s in [SimSpace_Occupied]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimSpace_Occupied_Default, name, value)
    __swig_getmethods__ = {}
    for _s in [SimSpace_Occupied]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, SimSpace_Occupied_Default, name)
    __repr__ = _swig_repr

    def __init__(self, *args):
        # Create the underlying C++ object and attach it as 'this'.
        this = _SimSpace_Occupied_Default.new_SimSpace_Occupied_Default(*args)
        try:
            self.this.append(this)
        except:
            self.this = this
    def _clone(self, f=0, c=None):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_Default__clone(self, f, c)
    __swig_destroy__ = _SimSpace_Occupied_Default.delete_SimSpace_Occupied_Default
    __del__ = lambda self: None
# Register the proxy class with the extension module.
SimSpace_Occupied_Default_swigregister = _SimSpace_Occupied_Default.SimSpace_Occupied_Default_swigregister
SimSpace_Occupied_Default_swigregister(SimSpace_Occupied_Default)
class SimSpace_Occupied_Default_sequence(base.sequence_common):
    """SWIG proxy for a C++ sequence (std::vector-like container) of
    SimSpace_Occupied_Default elements."""
    # Merge inherited setter/getter tables, then install dispatching hooks.
    __swig_setmethods__ = {}
    for _s in [base.sequence_common]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimSpace_Occupied_Default_sequence, name, value)
    __swig_getmethods__ = {}
    for _s in [base.sequence_common]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, SimSpace_Occupied_Default_sequence, name)
    __repr__ = _swig_repr

    def __init__(self, *args):
        # Create the underlying C++ container and attach it as 'this'.
        this = _SimSpace_Occupied_Default.new_SimSpace_Occupied_Default_sequence(*args)
        try:
            self.this.append(this)
        except:
            self.this = this

    # std::vector-style interface; each method forwards to the extension.
    def assign(self, n, x):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequence_assign(self, n, x)
    def begin(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequence_begin(self, *args)
    def end(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequence_end(self, *args)
    def rbegin(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequence_rbegin(self, *args)
    def rend(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequence_rend(self, *args)
    def at(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequence_at(self, *args)
    def front(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequence_front(self, *args)
    def back(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequence_back(self, *args)
    def push_back(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequence_push_back(self, *args)
    def pop_back(self):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequence_pop_back(self)
    def detach_back(self, pop=True):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequence_detach_back(self, pop)
    def insert(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequence_insert(self, *args)
    def erase(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequence_erase(self, *args)
    def detach(self, position, r, erase=True):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequence_detach(self, position, r, erase)
    def swap(self, x):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequence_swap(self, x)
    __swig_destroy__ = _SimSpace_Occupied_Default.delete_SimSpace_Occupied_Default_sequence
    __del__ = lambda self: None
# Register the proxy class with the extension module.
SimSpace_Occupied_Default_sequence_swigregister = _SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequence_swigregister
SimSpace_Occupied_Default_sequence_swigregister(SimSpace_Occupied_Default_sequence)
# This file is compatible with both classic and new-style classes.
| 40.868817
| 124
| 0.756051
| 1,981
| 19,004
| 6.770318
| 0.110045
| 0.219505
| 0.245228
| 0.221891
| 0.571056
| 0.549955
| 0.530421
| 0.370042
| 0.212123
| 0.183567
| 0
| 0.008261
| 0.165544
| 19,004
| 464
| 125
| 40.956897
| 0.837495
| 0.01547
| 0
| 0.233728
| 1
| 0
| 0.015294
| 0.002781
| 0
| 0
| 0
| 0
| 0
| 1
| 0.298817
| false
| 0.005917
| 0.032544
| 0.275148
| 0.730769
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
f74fdd12bdfcffe6285b81785646f57aab4459ff
| 10,581
|
py
|
Python
|
tests/test_mnist.py
|
RainingComputers/pykitml
|
1c3e50cebcdb6c4da63979ef9a812b44d23a4857
|
[
"MIT"
] | 34
|
2020-03-06T07:53:43.000Z
|
2022-03-13T06:12:29.000Z
|
tests/test_mnist.py
|
RainingComputers/pykitml
|
1c3e50cebcdb6c4da63979ef9a812b44d23a4857
|
[
"MIT"
] | 6
|
2021-06-08T22:43:23.000Z
|
2022-03-08T13:57:33.000Z
|
tests/test_mnist.py
|
RainingComputers/pykitml
|
1c3e50cebcdb6c4da63979ef9a812b44d23a4857
|
[
"MIT"
] | 1
|
2020-11-30T21:20:32.000Z
|
2020-11-30T21:20:32.000Z
|
import sys
import os.path
import numpy as np
import pykitml as pk
from pykitml.datasets import mnist
from pykitml.testing import pktest_graph, pktest_nograph
def test_download():
    """Fetch the MNIST dataset; reaching the final assert means the
    download side effect completed without raising."""
    mnist.get()
    assert True
@pktest_graph
def test_adagrad():
    """Train a 784-100-10 MNIST classifier with Adagrad and require >94%
    training accuracy."""
    train_x, train_y, test_x, test_y = mnist.load()

    # Build and train the network.
    classifier = pk.NeuralNetwork([784, 100, 10])
    classifier.train(
        training_data=train_x,
        targets=train_y,
        batch_size=50,
        epochs=1200,
        optimizer=pk.Adagrad(learning_rate=0.07, decay_rate=0.99),
        testing_data=test_x,
        testing_targets=test_y,
        testing_freq=30,
        decay_freq=10
    )

    # Persist the trained model.
    pk.save(classifier, 'digit_classifier_network.pkl')

    # Report accuracy on both splits.
    print('Train Accuracy:', classifier.accuracy(train_x, train_y))
    print('Test Accuracy:', classifier.accuracy(test_x, test_y))

    # Diagnostics: training curve and confusion matrix.
    classifier.plot_performance()
    classifier.confusion_matrix(train_x, train_y)

    assert classifier.accuracy(train_x, train_y) > 94
@pktest_graph
def test_nesterov():
    """Train a 784-100-10 MNIST classifier with Nesterov momentum and
    require >94% training accuracy."""
    train_x, train_y, test_x, test_y = mnist.load()

    # Build and train the network.
    classifier = pk.NeuralNetwork([784, 100, 10])
    classifier.train(
        training_data=train_x,
        targets=train_y,
        batch_size=50,
        epochs=1200,
        optimizer=pk.Nesterov(learning_rate=0.1, decay_rate=0.99),
        testing_data=test_x,
        testing_targets=test_y,
        testing_freq=30,
        decay_freq=10
    )

    # Persist the trained model.
    pk.save(classifier, 'digit_classifier_network.pkl')

    # Report accuracy on both splits.
    print('Train Accuracy:', classifier.accuracy(train_x, train_y))
    print('Test Accuracy:', classifier.accuracy(test_x, test_y))

    # Diagnostics: training curve and confusion matrix.
    classifier.plot_performance()
    classifier.confusion_matrix(train_x, train_y)

    assert classifier.accuracy(train_x, train_y) > 94
@pktest_graph
def test_relu_nesterov():
    """Train a ReLU/softmax/cross-entropy MNIST classifier with Nesterov
    momentum and require >94% training accuracy."""
    train_x, train_y, test_x, test_y = mnist.load()

    # Build and train the network (ReLU hidden layer config).
    classifier = pk.NeuralNetwork([784, 100, 10], config='relu-softmax-cross_entropy')
    classifier.train(
        training_data=train_x,
        targets=train_y,
        batch_size=50,
        epochs=1200,
        optimizer=pk.Nesterov(learning_rate=0.1, decay_rate=0.99),
        testing_data=test_x,
        testing_targets=test_y,
        testing_freq=30,
        decay_freq=10
    )

    # Persist the trained model.
    pk.save(classifier, 'digit_classifier_network.pkl')

    # Report accuracy on both splits.
    print('Train Accuracy:', classifier.accuracy(train_x, train_y))
    print('Test Accuracy:', classifier.accuracy(test_x, test_y))

    # Diagnostics: training curve and confusion matrix.
    classifier.plot_performance()
    classifier.confusion_matrix(train_x, train_y)

    assert classifier.accuracy(train_x, train_y) > 94
@pktest_graph
def test_momentum():
    """Train a 784-100-10 MNIST classifier with classical momentum and
    require >94% training accuracy."""
    train_x, train_y, test_x, test_y = mnist.load()

    # Build and train the network.
    classifier = pk.NeuralNetwork([784, 100, 10])
    classifier.train(
        training_data=train_x,
        targets=train_y,
        batch_size=50,
        epochs=1200,
        optimizer=pk.Momentum(learning_rate=0.1, decay_rate=0.95),
        testing_data=test_x,
        testing_targets=test_y,
        testing_freq=30,
        decay_freq=20
    )

    # Persist the trained model.
    pk.save(classifier, 'digit_classifier_network.pkl')

    # Report accuracy on both splits.
    print('Train Accuracy:', classifier.accuracy(train_x, train_y))
    print('Test Accuracy:', classifier.accuracy(test_x, test_y))

    # Diagnostics: training curve and confusion matrix.
    classifier.plot_performance()
    classifier.confusion_matrix(train_x, train_y)

    assert classifier.accuracy(train_x, train_y) > 94
@pktest_graph
def test_gradient_descent():
    """Train an MNIST classifier with plain gradient descent.

    Saves the trained model, reports train/test accuracy, shows the
    performance plot and confusion matrix, and asserts that training
    accuracy exceeds 92%.
    """
    # Load dataset
    train_x, train_y, test_x, test_y = mnist.load()
    # Default-config 784-100-10 network
    classifier = pk.NeuralNetwork([784, 100, 10])
    # Train with vanilla gradient descent
    classifier.train(
        training_data=train_x,
        targets=train_y,
        batch_size=50,
        epochs=1200,
        optimizer=pk.GradientDescent(learning_rate=0.2, decay_rate=0.99),
        testing_data=test_x,
        testing_targets=test_y,
        testing_freq=30,
        decay_freq=20
    )
    # Persist the trained model
    pk.save(classifier, 'digit_classifier_network.pkl')
    # Report performance on both splits
    print('Train Accuracy:', classifier.accuracy(train_x, train_y))
    print('Test Accuracy:', classifier.accuracy(test_x, test_y))
    # Visualize training history and per-class errors
    classifier.plot_performance()
    classifier.confusion_matrix(train_x, train_y)
    # Plain GD converges a little worse, hence the lower bar
    assert classifier.accuracy(train_x, train_y) > 92
@pktest_graph
def test_RMSprop():
    """Train an MNIST classifier with the RMSprop optimizer.

    Saves the trained model, reports train/test accuracy, shows the
    performance plot and confusion matrix, and asserts that training
    accuracy exceeds 95%.
    """
    # Load dataset
    train_x, train_y, test_x, test_y = mnist.load()
    # Default-config 784-100-10 network
    classifier = pk.NeuralNetwork([784, 100, 10])
    # Train with RMSprop
    classifier.train(
        training_data=train_x,
        targets=train_y,
        batch_size=50,
        epochs=1200,
        optimizer=pk.RMSprop(learning_rate=0.012, decay_rate=0.95),
        testing_data=test_x,
        testing_targets=test_y,
        testing_freq=30,
        decay_freq=15
    )
    # Persist the trained model
    pk.save(classifier, 'digit_classifier_network.pkl')
    # Report performance on both splits
    print('Train Accuracy:', classifier.accuracy(train_x, train_y))
    print('Test Accuracy:', classifier.accuracy(test_x, test_y))
    # Visualize training history and per-class errors
    classifier.plot_performance()
    classifier.confusion_matrix(train_x, train_y)
    # The model must fit the training set well enough
    assert classifier.accuracy(train_x, train_y) > 95
@pktest_graph
def test_adam():
    """Train an MNIST classifier with the Adam optimizer.

    Downloads the MNIST pickle if it is not already cached, trains a
    784-100-10 network, saves it to 'digit_classifier_network.pkl',
    shows the performance plot and confusion matrix, and asserts that
    training accuracy exceeds 95%.
    """
    # Local imports: this test is also runnable stand-alone.
    # (The original also imported numpy as np, which was never used.)
    import os.path

    import pykitml as pk
    from pykitml.datasets import mnist

    # Download dataset if not already present
    if not os.path.exists('mnist.pkl'):
        mnist.get()
    # Load dataset
    training_data, training_targets, testing_data, testing_targets = mnist.load()
    # Create a new neural network
    digit_classifier = pk.NeuralNetwork([784, 100, 10])
    # Train it
    digit_classifier.train(
        training_data=training_data,
        targets=training_targets,
        batch_size=50,
        epochs=1200,
        optimizer=pk.Adam(learning_rate=0.012, decay_rate=0.95),
        testing_data=testing_data,
        testing_targets=testing_targets,
        testing_freq=30,
        decay_freq=15
    )
    # Save it
    pk.save(digit_classifier, 'digit_classifier_network.pkl')
    # Show performance
    accuracy = digit_classifier.accuracy(training_data, training_targets)
    print('Train Accuracy:', accuracy)
    accuracy = digit_classifier.accuracy(testing_data, testing_targets)
    print('Test Accuracy:', accuracy)
    # Plot performance graph
    digit_classifier.plot_performance()
    # Show confusion matrix
    digit_classifier.confusion_matrix(training_data, training_targets)
    # Assert if it has enough accuracy
    assert digit_classifier.accuracy(training_data, training_targets) > 95
@pktest_graph
def test_predict_mnist_adam():
    """Load the saved classifier and predict one random digit from the test set.

    Displays the chosen image, prints its true label and the one-hot
    prediction of the previously saved network.
    """
    # Local imports: this test is also runnable stand-alone.
    # (The original also imported numpy as np, which was never used.)
    import random

    import matplotlib.pyplot as plt
    import pykitml as pk
    from pykitml.datasets import mnist

    # Load dataset
    training_data, training_targets, testing_data, testing_targets = mnist.load()
    # Load the trained network
    digit_classifier = pk.load('digit_classifier_network.pkl')
    # Pick a random example from testing data
    # (0-9999 matches the MNIST test-set size)
    index = random.randint(0, 9999)
    # Show the test data and the label
    # BUGFIX: the original indexed training_data/training_targets here even
    # though the comments and the 0-9999 range refer to the testing set.
    plt.imshow(testing_data[index].reshape(28, 28))
    plt.show()
    print('Label: ', testing_targets[index])
    # Show prediction
    digit_classifier.feed(testing_data[index])
    model_output = digit_classifier.get_output_onehot()
    print('Predicted: ', model_output)
if __name__ == '__main__':
    # Optimizer names that have a matching test_<name>() function
    optimizers = [
        'gradient_descent', 'momentum', 'nesterov',
        'adagrad', 'RMSprop', 'adam'
    ]
    # Check if arguments passed to the script are correct
    if len(sys.argv) != 2 or sys.argv[1] not in optimizers:
        print('Usage: python3 test_mnist.py OPTIMIZER')
        print('List of available optimizers:')
        print(str(optimizers))
        # BUGFIX: use sys.exit() instead of the interactive-only exit()
        # builtin (exit() comes from the site module and may be absent).
        sys.exit()
    # If the dataset is not available then download it
    if not os.path.exists('mnist.pkl'):
        mnist.get()
    # Run the requested optimizer test function.
    # __wrapped__ presumably bypasses the @pktest_graph decorator so the
    # plots are shown when run as a script -- confirm against the decorator.
    try:
        locals()['test_' + sys.argv[1]].__wrapped__()
        test_predict_mnist_adam.__wrapped__()
    except AssertionError:
        # Accuracy assertions are informational when run interactively
        pass
| 30.492795
| 92
| 0.697004
| 1,249
| 10,581
| 5.653323
| 0.117694
| 0.142331
| 0.101969
| 0.110891
| 0.840533
| 0.837558
| 0.837558
| 0.835576
| 0.835576
| 0.819572
| 0
| 0.023439
| 0.221813
| 10,581
| 346
| 93
| 30.580925
| 0.834103
| 0.134675
| 0
| 0.748792
| 0
| 0
| 0.068119
| 0.027512
| 0
| 0
| 0
| 0
| 0.043478
| 1
| 0.043478
| false
| 0.004831
| 0.072464
| 0
| 0.115942
| 0.091787
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f7957644392e9a66aba5c2b90d9fb4e729e366ec
| 4,426
|
py
|
Python
|
examples/undocumented/python/tests_check_commwordkernel_memleak.py
|
cloner1984/shogun
|
901c04b2c6550918acf0594ef8afeb5dcd840a7d
|
[
"BSD-3-Clause"
] | 2
|
2015-01-13T15:18:27.000Z
|
2015-05-01T13:28:48.000Z
|
examples/undocumented/python/tests_check_commwordkernel_memleak.py
|
cloner1984/shogun
|
901c04b2c6550918acf0594ef8afeb5dcd840a7d
|
[
"BSD-3-Clause"
] | null | null | null |
examples/undocumented/python/tests_check_commwordkernel_memleak.py
|
cloner1984/shogun
|
901c04b2c6550918acf0594ef8afeb5dcd840a7d
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# Default arguments for tests_check_commwordkernel_memleak:
# (num, order, gap, reverse)
parameter_list=[[10,7,0,False]]
def tests_check_commwordkernel_memleak(num, order, gap, reverse):
    """Repeatedly build a CommWordStringKernel over DNA strings to check for leaks.

    Constructs the same word-feature/kernel pipeline ten times in a row so
    that a memory leak in shogun shows up as monotonically growing usage.

    Parameters
    ----------
    num : int
        Repetition factor for each 4-character DNA string.
    order : int
        k-mer order used when converting char features to word features.
    gap : int
        Gap parameter forwarded to obtain_from_char.
    reverse : bool
        Whether reversed strings are also considered.

    Returns
    -------
    The kernel matrix computed in the last iteration.
    """
    from shogun import Alphabet, StringCharFeatures, StringWordFeatures, DNA
    from shogun import SortWordString
    from shogun import CommWordStringKernel, IdentityKernelNormalizer

    # The original spelled these lists out element by element; each is
    # exactly 60x 'ACGT', 21x 'TTGT', 60x 'ACGT' (141 strings), and NEG
    # is identical to POS.
    POS = [num * 'ACGT'] * 60 + [num * 'TTGT'] * 21 + [num * 'ACGT'] * 60
    NEG = list(POS)

    K = None
    for _ in range(10):
        alpha = Alphabet(DNA)
        traindat = StringCharFeatures(alpha)
        traindat.set_features(POS + NEG)
        # Convert char features to sorted word (k-mer) features
        trainudat = StringWordFeatures(traindat.get_alphabet())
        trainudat.obtain_from_char(traindat, order - 1, order, gap, reverse)
        pre = SortWordString()
        pre.fit(trainudat)
        trainudat = pre.transform(trainudat)
        # Spectrum kernel without normalization
        spec = CommWordStringKernel(10, False)
        spec.set_normalizer(IdentityKernelNormalizer())
        spec.init(trainudat, trainudat)
        K = spec.get_kernel_matrix()
    return K
if __name__=='__main__':
    print('Leak Check Comm Word Kernel')
    # Run once with the default (num, order, gap, reverse) parameters
    tests_check_commwordkernel_memleak(*parameter_list[0])
| 51.465116
| 70
| 0.638726
| 706
| 4,426
| 3.964589
| 0.100567
| 0.600214
| 0.850304
| 1.180422
| 0.722401
| 0.722401
| 0.705252
| 0.705252
| 0.705252
| 0.705252
| 0
| 0.002462
| 0.082241
| 4,426
| 85
| 71
| 52.070588
| 0.686608
| 0.019431
| 0
| 0.597403
| 0
| 0
| 0.268158
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012987
| false
| 0
| 0.064935
| 0
| 0.090909
| 0.012987
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f7a14c3c1dbfee4aca574d131b9060d0e43b9b6e
| 16,893
|
py
|
Python
|
episode02-mozart/turkish_march_1.py
|
ales-eri/edison-robot-ideas
|
fd60f7172192c017326c52727c0aa08e7b1f5368
|
[
"Unlicense"
] | 7
|
2019-02-20T17:06:22.000Z
|
2021-12-07T02:14:26.000Z
|
episode02-mozart/turkish_march_1.py
|
ales-eri/edison-robot-ideas
|
fd60f7172192c017326c52727c0aa08e7b1f5368
|
[
"Unlicense"
] | null | null | null |
episode02-mozart/turkish_march_1.py
|
ales-eri/edison-robot-ideas
|
fd60f7172192c017326c52727c0aa08e7b1f5368
|
[
"Unlicense"
] | 6
|
2021-01-30T13:30:41.000Z
|
2022-03-09T19:42:04.000Z
|
#
# Description: Edison robot plays "Turkish March" by Wolfgang Amadeus Mozart.
# Notes: You can listen result here: https://www.youtube.com/watch?v=1Cfpv6s_xrY
# You can upload program to Edison robot by online EdPy environment http://edpyapp.com/#
# This is the first voice only. I wrote three programs (one for every voice),
# but I had to modify each program for every one of my robots, because they run the program at different speeds. I do not know why.
# If you have some questions, ask me in comments of the video: https://www.youtube.com/watch?v=1Cfpv6s_xrY
#
import Ed
# Edison robot configuration: hardware version, distance units and tempo.
# TEMPO_FAST affects built-in Ed.PlayBeep-style timing; this program drives
# the beeper registers directly and adds its own delay loop in playTone.
Ed.EdisonVersion = Ed.V2
Ed.DistanceUnits = Ed.CM
Ed.Tempo = Ed.TEMPO_FAST
# Lookup table of 46 notes, index 00 (B5) .. 45 (#G9), one semitone apart.
# Each value is written to REG_BEEP_FREQ_16, so it is presumably the beeper
# timer period (inversely proportional to the frequency listed in the
# comment) -- TODO confirm against the Edison register documentation.
tones = Ed.List(46, [
    32396, # 00 B5 987.767 Hz
    30578, # 01 C6 1046.502 Hz
    28862, # 02 #C6 1108.731 Hz
    27242, # 03 D6 1174.659 Hz
    25713, # 04 #D6 1244.508 Hz
    24270, # 05 E6 1318.510 Hz
    22908, # 06 F6 1396.913 Hz
    21622, # 07 #F6 1479.978 Hz
    20408, # 08 G6 1567.982 Hz
    19263, # 09 #G6 1661.219 Hz
    18181, # 10 A6 1760.000 Hz
    17161, # 11 #A6 1864.655 Hz
    16202, # 12 B6 1975.533 Hz
    15289, # 13 C7 2093.005 Hz
    14431, # 14 #C7 2217.461 Hz
    13622, # 15 D7 2349.318 Hz
    12856, # 16 #D7 2489.016 Hz
    12135, # 17 E7 2637.021 Hz
    11457, # 18 F7 2793.826 Hz
    10811, # 19 #F7 2959.956 Hz
    10207, # 20 G7 3135.964 Hz
    9631, # 21 #G7 3322.438 Hz
    9090, # 22 A7 3520.000 Hz
    8581, # 23 #A7 3729.310 Hz
    8099, # 24 B7 3951.066 Hz
    7645, # 25 C8 4186.009 Hz
    7215, # 26 #C8 4434.922 Hz
    6810, # 27 D8 4698.637 Hz
    6428, # 28 #D8 4978.032 Hz
    6067, # 29 E8 5274.042 Hz
    5727, # 30 F8 5587.652 Hz
    5405, # 31 #F8 5919.912 Hz
    5102, # 32 G8 6271.928 Hz
    4816, # 33 #G8 6644.876 Hz
    4545, # 34 A8 7040.000 Hz
    4290, # 35 #A8 7458.620 Hz
    4050, # 36 B8 7902.133 Hz
    3822, # 37 C9 8372.019 Hz
    3608, # 38 #C9 8869.845 Hz
    3405, # 39 D9 9397.273 Hz
    3214, # 40 #D9 9956.064 Hz
    3034, # 41 E9 10548.083 Hz
    2863, # 42 F9 11175.305 Hz
    2703, # 43 #F9 11839.823 Hz
    2551, # 44 G9 12543.855 Hz
    2408]) # 45 #G9 13289.752 Hz
# transpose music up or down
#TRANSPOSITION = -6
TRANSPOSITION = -10
# tone length constants (duration + action)
# Low bits hold the note length in sixteenths; bit 7 (| 128) marks
# "actually play a tone". playTone shifts bit 7 out to drive both the
# beeper action register and the LEDs. LENGTH_0 keeps bit 7 clear, so
# it neither plays nor interrupts the previous tone.
LENGTH_0 = 0
LENGTH_1 = 1 | 128
LENGTH_2 = 2 | 128
LENGTH_4 = 4 | 128
LENGTH_8 = 8 | 128
LENGTH_16 = 16 | 128
# This method must be called eighth time in every 2/4 measure or twelve time in 3/4 measure
#
# Inputs:
# - length: LENGTH_0 - do not play any tone and do not interrupt previous tone
# LENGTH_1 - sixteenth
# LENGTH_2 - eighth
# LENGTH_4 - quarter
# LENGTH_8 - half
# LENGTH_16 - whole
# - tone: index to table above.
#
# Time consumption of this method must be same for every input!
# We must not use IFs or conditions - it is compiled as conditional jump and breaks time synchronization between robots
# We also must use internal methods, because public methods use IFs
def playTone(length, tone):
    # Play (or rest for) one sixteenth-note time slot.
    #   length: one of the LENGTH_* constants (duration in low bits,
    #           "play" flag in bit 7)
    #   tone:   index into the tones table, before TRANSPOSITION is applied
    # NOTE: per the comments above, this function must take the same wall
    # time for every input -- no conditionals, only straight-line register
    # writes -- so that several robots stay synchronized. Do not restructure.
    # turn on or off LEDs (length >> 7 is 1 when playing, 0 for a rest)
    Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, length >> 7)
    Ed.WriteModuleRegister8Bit(Ed.MODULE_RIGHT_LED, Ed.REG_LED_OUTPUT_8, length >> 7)
    # transpose and set tone
    Ed.WriteModuleRegister16Bit(Ed.MODULE_BEEPER, Ed.REG_BEEP_FREQ_16, tones[tone + TRANSPOSITION])
    # real duration (1,2,4,8,16) is separate from input by masked upper bits
    # expression ((length & 127) * A + B) must be changed carefully together with length of empty cycle and with new commands
    Ed.WriteModuleRegister16Bit(Ed.MODULE_BEEPER, Ed.REG_BEEP_DURATION_16, (length & 127) * 7 + 1)
    # length constant for playing (LENGTH_1 .. LENGTH_16) has 7th bit == 1, LENGTH_0 has 7th bit == 0
    # we convert it by shift to 2 (for playing) or 0 (for do nothing)
    Ed.WriteModuleRegister8Bit(Ed.MODULE_BEEPER, Ed.REG_BEEP_ACTION_8, length >> 6)
    t = 0
    # turn off LEDs
    Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0)
    Ed.WriteModuleRegister8Bit(Ed.MODULE_RIGHT_LED, Ed.REG_LED_OUTPUT_8, 0)
    # empty cycle for decreasing tempo (fixed-cost busy wait; see timing note)
    while t < 5:
        t = t + 1
# ------ Main Program --------
# wait 200 milliseconds to avoid triggering clap sensor by play button
Ed.WriteModuleRegister16Bit(Ed.MODULE_TIMERS, Ed.REG_TIMER_PAUSE_16, 20)
Ed.WriteModuleRegister8Bit(Ed.MODULE_TIMERS, Ed.REG_TIMER_ACTION_8, 2)
# Clear the clap flag, then busy-wait on the beeper status register --
# presumably until a clap is detected, so several robots can be started
# in sync with one clap (TODO confirm meaning of status value 4).
Ed.ClearModuleRegisterBit(Ed.MODULE_BEEPER, Ed.REG_BEEP_STATUS_8, Ed.CLAP_DETECTED_BIT)
while Ed.ReadModuleRegister8Bit(Ed.MODULE_BEEPER, Ed.REG_BEEP_STATUS_8) != 4:
    pass
#PAUSE IS 26, we can choose any value, but it must be between 0-46 after transposition
# Eight LENGTH_0 slots = two measures of silence before the music starts.
playTone(LENGTH_0, 26)#r
playTone(LENGTH_0, 26)
playTone(LENGTH_0, 26)
playTone(LENGTH_0, 26)
playTone(LENGTH_0, 26)#r
playTone(LENGTH_0, 26)
playTone(LENGTH_0, 26)
playTone(LENGTH_0, 26)
repeat = 0
while repeat < 2:
playTone(LENGTH_1, 24)#b7
playTone(LENGTH_1, 22)#a7
playTone(LENGTH_1, 21)##g7
playTone(LENGTH_1, 22)#a7
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_4, 25)#c8
playTone(LENGTH_0, 25)
playTone(LENGTH_0, 25)
playTone(LENGTH_0, 25)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_1, 27)#d8
playTone(LENGTH_1, 25)#c8
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_1, 24)#b7
playTone(LENGTH_1, 25)#c8
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_4, 29)#e8
playTone(LENGTH_0, 29)
playTone(LENGTH_0, 29)
playTone(LENGTH_0, 29)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_1, 30)#f8
playTone(LENGTH_1, 29)#e8
playTone(LENGTH_1, 28)##d8
playTone(LENGTH_1, 29)#e8
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_1, 36)#b8
playTone(LENGTH_1, 34)#a8
playTone(LENGTH_1, 33)##g8
playTone(LENGTH_1, 34)#a8
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_1, 36)#b8
playTone(LENGTH_1, 34)#a8
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_1, 33)##g8
playTone(LENGTH_1, 34)#a8
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_4, 37)#c9
playTone(LENGTH_0, 37)
playTone(LENGTH_0, 37)
playTone(LENGTH_0, 37)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_2, 34)#a8
playTone(LENGTH_0, 34)
playTone(LENGTH_2, 37)#c9
playTone(LENGTH_0, 37)#
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_2, 36)#b8
playTone(LENGTH_0, 36)
playTone(LENGTH_2, 34)#a8
playTone(LENGTH_0, 34)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_2, 32)#g8
playTone(LENGTH_0, 32)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_2, 34)#a8
playTone(LENGTH_0, 34)#a8
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_2, 36)#b8
playTone(LENGTH_0, 36)
playTone(LENGTH_2, 34)#a8
playTone(LENGTH_0, 34)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_2, 32)#g8
playTone(LENGTH_0, 32)
playTone(LENGTH_2, 34)#a8
playTone(LENGTH_0, 34)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_2, 36)#b8
playTone(LENGTH_0, 36)
playTone(LENGTH_2, 34)#a8
playTone(LENGTH_0, 34)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_2, 32)#g8
playTone(LENGTH_0, 32)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_2, 31)##f8
playTone(LENGTH_0, 31)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_4, 29)#e8
playTone(LENGTH_0, 29)
playTone(LENGTH_0, 29)
playTone(LENGTH_0, 29)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
repeat = repeat + 1
#--------------------------------------------------------------
playTone(LENGTH_2,29)#e8
playTone(LENGTH_0,29)
playTone(LENGTH_2,30)#f8
playTone(LENGTH_0,30)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_2, 32)#g8
playTone(LENGTH_0, 32)
playTone(LENGTH_2, 32)#g8
playTone(LENGTH_0, 32)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_1, 34)#a8
playTone(LENGTH_1, 32)#g8
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_1, 30)#f8
playTone(LENGTH_1, 29)#e8
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_4, 27)#d8
playTone(LENGTH_0, 27)
playTone(LENGTH_0, 27)
playTone(LENGTH_0, 27)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_2, 29)#e8
playTone(LENGTH_0, 29)
playTone(LENGTH_2, 30)#f8
playTone(LENGTH_0, 30)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_2, 32)#g8
playTone(LENGTH_0, 32)
playTone(LENGTH_2, 32)#g8
playTone(LENGTH_0, 32)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_1, 34)#a8
playTone(LENGTH_1, 32)#g8
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_1, 30)#f8
playTone(LENGTH_1, 29)#e8
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_4, 27)#d8
playTone(LENGTH_0, 27)
playTone(LENGTH_0, 27)
playTone(LENGTH_0, 27)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_2, 25)#c8
playTone(LENGTH_0, 25)
playTone(LENGTH_2, 27)#d8
playTone(LENGTH_0, 27)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_2, 29)#e8
playTone(LENGTH_0, 29)
playTone(LENGTH_2, 29)#e8
playTone(LENGTH_0, 29)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_1, 30)#f8
playTone(LENGTH_1, 29)#e8
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_1, 27)#d8
playTone(LENGTH_1, 25)#c8
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_4, 24)#b7
playTone(LENGTH_0, 24)
playTone(LENGTH_0, 24)
playTone(LENGTH_0, 24)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_2, 25)#c8
playTone(LENGTH_0, 25)
playTone(LENGTH_2, 27)#d8
playTone(LENGTH_0, 27)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_2, 29)#e8
playTone(LENGTH_0, 29)
playTone(LENGTH_2, 29)#e8
playTone(LENGTH_0, 29)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_1, 30)#f8
playTone(LENGTH_1, 29)#e8
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_1, 27)#d8
playTone(LENGTH_1, 25)#c8
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_4, 24)#b7
playTone(LENGTH_0, 24)
playTone(LENGTH_0, 24)
playTone(LENGTH_0, 24)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_1, 24)#b7
playTone(LENGTH_1, 22)#a7
playTone(LENGTH_1, 21)# #g7
playTone(LENGTH_1, 22)#a7
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_4, 25)#c8
playTone(LENGTH_0, 25)
playTone(LENGTH_0, 25)
playTone(LENGTH_0, 25)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_1, 27)#d8
playTone(LENGTH_1, 25)#c8
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_1, 24)#b7
playTone(LENGTH_1, 25)#c8
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_4, 29)#e8
playTone(LENGTH_0, 29)
playTone(LENGTH_0, 29)
playTone(LENGTH_0, 29)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_1, 30)#f8
playTone(LENGTH_1, 29)#e8
playTone(LENGTH_1, 28)# #d8
playTone(LENGTH_1, 29)#e8
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_1, 36)#b8
playTone(LENGTH_1, 34)#a8
playTone(LENGTH_1, 33)# #g8
playTone(LENGTH_1, 34)#a8
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_1, 36)#b8
playTone(LENGTH_1, 34)#a8
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_1, 33)# #g8
playTone(LENGTH_1, 34)#a8
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_4, 37)#c9
playTone(LENGTH_0, 37)
playTone(LENGTH_0, 37)
playTone(LENGTH_0, 37)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_2, 34)#a8
playTone(LENGTH_0, 34)
playTone(LENGTH_2, 36)#b8
playTone(LENGTH_0, 36)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_2, 37)#c9
playTone(LENGTH_0, 37)
playTone(LENGTH_2, 36)#b8
playTone(LENGTH_0, 36)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_2, 34)#a8
playTone(LENGTH_0, 34)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_2, 33)# #g8
playTone(LENGTH_0, 33)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_2, 34)#a8
playTone(LENGTH_0, 34)
playTone(LENGTH_2, 29)#e8
playTone(LENGTH_0, 29)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_2, 30)#f8
playTone(LENGTH_0, 30)
playTone(LENGTH_2, 27)#d8
playTone(LENGTH_0, 27)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_4, 25)#c8
playTone(LENGTH_0, 25)
playTone(LENGTH_0, 25)
playTone(LENGTH_0, 25)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_2, 24)#b7
playTone(LENGTH_0, 24)
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_1, 22)#a7
playTone(LENGTH_1, 24)#b7
Ed.WriteModuleRegister8Bit(Ed.MODULE_LEFT_LED, Ed.REG_LED_OUTPUT_8, 0) #compensation of some unexplained unsync
playTone(LENGTH_4, 22)#a7
playTone(LENGTH_0, 22)
playTone(LENGTH_0, 22)
playTone(LENGTH_0, 22)
| 40.706024
| 125
| 0.749364
| 2,710
| 16,893
| 4.463469
| 0.156089
| 0.232639
| 0.109127
| 0.177331
| 0.785384
| 0.785053
| 0.778853
| 0.77877
| 0.753142
| 0.749339
| 0
| 0.117274
| 0.150477
| 16,893
| 414
| 126
| 40.804348
| 0.725594
| 0.322796
| 0
| 0.755952
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.002976
| false
| 0.002976
| 0.002976
| 0
| 0.005952
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e3d146f4e9d0bc3bae08d6c03e773a782f945966
| 4,781
|
py
|
Python
|
pyoaz_tests/test_az_search.py
|
ameroueh/oaz
|
7cf192b02adaa373b7b93bedae3ef67886ea53af
|
[
"MIT"
] | 8
|
2021-03-18T16:06:42.000Z
|
2022-03-09T10:42:44.000Z
|
pyoaz_tests/test_az_search.py
|
ameroueh/oaz
|
7cf192b02adaa373b7b93bedae3ef67886ea53af
|
[
"MIT"
] | null | null | null |
pyoaz_tests/test_az_search.py
|
ameroueh/oaz
|
7cf192b02adaa373b7b93bedae3ef67886ea53af
|
[
"MIT"
] | null | null | null |
import tensorflow.compat.v1 as tf
from pyoaz.thread_pool import ThreadPool
from pyoaz.search import Search
from pyoaz.selection import AZSelector
from pyoaz.evaluator.nn_evaluator import Model, NNEvaluator
from pyoaz.games.connect_four import ConnectFour
from pyoaz.cache.simple_cache import SimpleCache
def test_az_search():
    """Smoke test: build a tiny TF1 ConnectFour net and run an AlphaZero search.

    Passes if constructing the Search (which runs the search iterations)
    completes without raising; no assertions on the search result.
    """
    with tf.Session() as session:
        # Neural network definition. The placeholder is a (batch, 6, 7, 2)
        # board tensor; the op must be named "input" for Model lookup below.
        # (Renamed the local from `input`, which shadowed the builtin.)
        board = tf.placeholder(
            dtype=tf.float32, shape=[None, 6, 7, 2], name="input"
        )
        conv0_filters = tf.Variable(
            [
                [[[1.0], [1.0]]],
                [[[-2.0], [-2.0]]],
                [[[3.0], [3.0]]],
                [[[4.0], [4.0]]],
            ],
            name="conv0_filters",
            dtype=tf.float32,
        )
        conv0 = tf.nn.conv2d(board, conv0_filters, 1, "SAME")
        max_pool0 = tf.nn.max_pool2d(
            conv0, [1, 2, 2, 1], [1, 2, 2, 1], padding="SAME"
        )
        flat = tf.reshape(max_pool0, [-1, 12], name="flat")
        # Value and policy heads. BUGFIX: the original created dense_value,
        # dense_policy and the "value" matmul twice each; the duplicates
        # were dead graph nodes (TF renames the second "value" op to
        # "value_1", so Model still resolved the first) and are removed.
        dense_value = tf.Variable([[1.0] for _ in range(12)], dtype=tf.float32)
        _ = tf.matmul(flat, dense_value, name="value")
        dense_policy = tf.Variable(
            [[1.0 for _ in range(7)] for _ in range(12)], dtype=tf.float32
        )
        policy_logits = tf.matmul(flat, dense_policy, name="policy_logits")
        _ = tf.nn.softmax(policy_logits, name="policy")
        session.run(tf.global_variables_initializer())
        # AZ search definition: wire the session's named ops into a Model,
        # evaluate with a 2-worker thread pool, batch size 1.
        model = Model(
            session=session,
            input_node_name="input",
            value_node_name="value",
            policy_node_name="policy",
        )
        thread_pool = ThreadPool(n_workers=2)
        evaluator = NNEvaluator(
            model=model,
            thread_pool=thread_pool,
            dimensions=(6, 7, 2),
            batch_size=1,
        )
        selector = AZSelector()
        game = ConnectFour()
        # Constructing Search runs 100 iterations with Dirichlet noise.
        _ = Search(
            game=game,
            selector=selector,
            evaluator=evaluator,
            thread_pool=thread_pool,
            n_concurrent_workers=2,
            n_iterations=100,
            noise_epsilon=0.25,
            noise_alpha=1,
        )
def test_az_search_with_cache():
    """Smoke-test an AlphaZero search on Connect Four with a SimpleCache.

    Same tiny TF1 network as ``test_az_search`` but the evaluator is backed
    by a position cache and a single worker.  No return value; success
    means no exception.
    """
    with tf.Session() as session:
        # --- Neural network definition --------------------------------
        input_node = tf.placeholder(
            dtype=tf.float32, shape=[None, 6, 7, 2], name="input"
        )
        conv0_filters = tf.Variable(
            [
                [[[1.0], [1.0]]],
                [[[-2.0], [-2.0]]],
                [[[3.0], [3.0]]],
                [[[4.0], [4.0]]],
            ],
            name="conv0_filters",
            dtype=tf.float32,
        )
        conv0 = tf.nn.conv2d(input_node, conv0_filters, 1, "SAME")
        max_pool0 = tf.nn.max_pool2d(
            conv0, [1, 2, 2, 1], [1, 2, 2, 1], padding="SAME"
        )
        flat = tf.reshape(max_pool0, [-1, 12], name="flat")
        # Value and policy heads.  NOTE: the original defined dense_value,
        # dense_policy and the "value" matmul twice; the duplicates created
        # extra variables plus a shadow "value_1" op and were removed —
        # Model still resolves the first (and now only) op named "value".
        dense_value = tf.Variable([[1.0] for _ in range(12)], dtype=tf.float32)
        _ = tf.matmul(flat, dense_value, name="value")
        dense_policy = tf.Variable(
            [[1.0 for _ in range(7)] for _ in range(12)], dtype=tf.float32
        )
        policy_logits = tf.matmul(flat, dense_policy, name="policy_logits")
        _ = tf.nn.softmax(policy_logits, name="policy")
        session.run(tf.global_variables_initializer())
        # --- AZ search definition --------------------------------------
        model = Model(
            session=session,
            input_node_name="input",
            value_node_name="value",
            policy_node_name="policy",
        )
        thread_pool = ThreadPool(n_workers=1)
        cache = SimpleCache(ConnectFour(), 100)
        evaluator = NNEvaluator(
            model=model,
            cache=cache,
            thread_pool=thread_pool,
            dimensions=(6, 7, 2),
            batch_size=1,
        )
        selector = AZSelector()
        game = ConnectFour()
        _ = Search(
            game=game,
            selector=selector,
            evaluator=evaluator,
            thread_pool=thread_pool,
            n_concurrent_workers=1,
            n_iterations=100,
            noise_epsilon=0.25,
            noise_alpha=1,
        )
if __name__ == "__main__":
    # Run both smoke tests when executed as a script; the original invoked
    # only test_az_search, leaving the cache variant unexercised.
    test_az_search()
    test_az_search_with_cache()
| 32.972414
| 79
| 0.53378
| 562
| 4,781
| 4.341637
| 0.158363
| 0.034426
| 0.068852
| 0.04918
| 0.840164
| 0.82623
| 0.82623
| 0.82623
| 0.82623
| 0.82623
| 0
| 0.053543
| 0.335913
| 4,781
| 144
| 80
| 33.201389
| 0.714961
| 0.019452
| 0
| 0.724409
| 0
| 0
| 0.033739
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015748
| false
| 0
| 0.055118
| 0
| 0.070866
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
54202d37d6a351b8063f109dcf184f8b20e08617
| 137
|
py
|
Python
|
tests/fake_adc.py
|
czbiohub/PyButtons
|
f1b7b28afff6678551cf6911387d27265eceb9a0
|
[
"MIT"
] | null | null | null |
tests/fake_adc.py
|
czbiohub/PyButtons
|
f1b7b28afff6678551cf6911387d27265eceb9a0
|
[
"MIT"
] | null | null | null |
tests/fake_adc.py
|
czbiohub/PyButtons
|
f1b7b28afff6678551cf6911387d27265eceb9a0
|
[
"MIT"
] | null | null | null |
class MCP3008():
    """Fake stand-in for an MCP3008 ADC channel, for use in tests.

    Always reports ``fake_val`` (initially 0); tests may assign
    ``fake_val`` to simulate a reading.  Presumably mirrors a gpiozero-style
    ADC interface — TODO confirm against the real class being faked.
    """

    def __init__(self, channel):
        # Keep the requested channel for inspection; the original
        # constructor silently discarded it.
        self.channel = channel
        self.fake_val = 0

    def value(self, pin=None):
        """Return the faked reading; *pin* is accepted and ignored."""
        return self.fake_val
| 19.571429
| 32
| 0.613139
| 19
| 137
| 4.105263
| 0.684211
| 0.205128
| 0.282051
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.050505
| 0.277372
| 137
| 6
| 33
| 22.833333
| 0.737374
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
5449c64bad55e384c0b0a379d3d8717816fe412f
| 749
|
py
|
Python
|
src/the_tale/the_tale/game/pvp/jinjaglobals.py
|
al-arz/the-tale
|
542770257eb6ebd56a5ac44ea1ef93ff4ab19eb5
|
[
"BSD-3-Clause"
] | 85
|
2017-11-21T12:22:02.000Z
|
2022-03-27T23:07:17.000Z
|
src/the_tale/the_tale/game/pvp/jinjaglobals.py
|
al-arz/the-tale
|
542770257eb6ebd56a5ac44ea1ef93ff4ab19eb5
|
[
"BSD-3-Clause"
] | 545
|
2017-11-04T14:15:04.000Z
|
2022-03-27T14:19:27.000Z
|
src/the_tale/the_tale/game/pvp/jinjaglobals.py
|
al-arz/the-tale
|
542770257eb6ebd56a5ac44ea1ef93ff4ab19eb5
|
[
"BSD-3-Clause"
] | 45
|
2017-11-11T12:36:30.000Z
|
2022-02-25T06:10:44.000Z
|
import smart_imports
smart_imports.all()
@utils_jinja2.jinjaglobal
def pvp_page_url():
    """Jinja2 global: markup-safe URL of the PvP page."""
    url = logic.pvp_page_url()
    return utils_jinja2.Markup(url)
@utils_jinja2.jinjaglobal
def pvp_info_url():
    """Jinja2 global: markup-safe URL of the PvP info endpoint."""
    url = logic.pvp_info_url()
    return utils_jinja2.Markup(url)
@utils_jinja2.jinjaglobal
def pvp_call_to_arena_url():
    """Jinja2 global: markup-safe URL of the call-to-arena action."""
    url = logic.pvp_call_to_arena_url()
    return utils_jinja2.Markup(url)
@utils_jinja2.jinjaglobal
def pvp_leave_arena_url():
    """Jinja2 global: markup-safe URL of the leave-arena action."""
    url = logic.pvp_leave_arena_url()
    return utils_jinja2.Markup(url)
@utils_jinja2.jinjaglobal
def pvp_accept_arena_battle_url():
    """Jinja2 global: markup-safe URL of the accept-arena-battle action."""
    url = logic.pvp_accept_arena_battle_url()
    return utils_jinja2.Markup(url)
@utils_jinja2.jinjaglobal
def pvp_create_arena_bot_battle_url():
    """Jinja2 global: markup-safe URL of the create-arena-bot-battle action."""
    url = logic.pvp_create_arena_bot_battle_url()
    return utils_jinja2.Markup(url)
| 21.4
| 71
| 0.813084
| 112
| 749
| 4.955357
| 0.205357
| 0.237838
| 0.237838
| 0.27027
| 0.893694
| 0.771171
| 0.536937
| 0.284685
| 0
| 0
| 0
| 0.017699
| 0.094793
| 749
| 34
| 72
| 22.029412
| 0.800885
| 0
| 0
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.3
| true
| 0
| 0.1
| 0.3
| 0.7
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 9
|
544a51a4d6e4a8c142b7eb52cfb0173bcd998351
| 20,576
|
py
|
Python
|
cyclopeps/tests/peps/test_peps.py
|
philliphelms/cyclopeps
|
f024d827a7412f4d9df10d6b9453c2692b1a74c3
|
[
"MIT"
] | null | null | null |
cyclopeps/tests/peps/test_peps.py
|
philliphelms/cyclopeps
|
f024d827a7412f4d9df10d6b9453c2692b1a74c3
|
[
"MIT"
] | null | null | null |
cyclopeps/tests/peps/test_peps.py
|
philliphelms/cyclopeps
|
f024d827a7412f4d9df10d6b9453c2692b1a74c3
|
[
"MIT"
] | null | null | null |
import unittest
from cyclopeps.tools.utils import *
import copy
class test_peps(unittest.TestCase):
    """Tests for PEPS: norm computation (dense vs. sparse), explicit
    normalization, rotation and flipping, with and without Zn symmetry,
    on both the 'numpy' and 'ctf' backends.

    NOTE(review): the original file defined ``test_normalization_Z2`` and
    ``test_normalization_Z2_ctf`` twice with identical bodies; the second
    definition shadowed the first, so the duplicates were collapsed into a
    single definition each.  The heavily copy-pasted test bodies were
    factored into the private ``_*`` driver methods below; every public
    test method name is preserved.
    """

    # Relative tolerance used by the symmetric-norm comparisons.
    TOL = 1e-3

    # ------------------------------------------------------------------
    # helpers
    # ------------------------------------------------------------------
    @staticmethod
    def _banner(title):
        """Print the section header every test starts with."""
        mpiprint(0, '\n' + '=' * 50 + '\n' + title + '\n' + '-' * 50)

    @staticmethod
    def _passed():
        """Print the trailing 'Passed' footer."""
        mpiprint(0, 'Passed\n' + '=' * 50)

    @staticmethod
    def _random_peps(Nx, Ny, D, chi, Zn=None, dZn=None, backend='numpy',
                     normalize=False, d=2):
        """Build a random PEPS with the given geometry, symmetry, backend."""
        from cyclopeps.tools.peps_tools import PEPS
        kwargs = dict(Nx=Nx, Ny=Ny, d=d, D=D, chi=chi,
                      backend=backend, normalize=normalize)
        if Zn is not None:
            kwargs['Zn'] = Zn
        if dZn is not None:
            kwargs['dZn'] = dZn
        return PEPS(**kwargs)

    def _norms_both_ways(self, peps, chi, prefix='', suffix=''):
        """Compute the norm densely and via make_sparse(); log and return
        the pair (dense, sparse)."""
        dense = peps.calc_norm(chi=chi)
        sparse = peps.make_sparse().calc_norm(chi=chi)
        mpiprint(0, '{}Symmetric Dense Norm {}= {}'.format(prefix, suffix, dense))
        mpiprint(0, '{}Symmetric Sparse Norm {}= {}'.format(prefix, suffix, sparse))
        return dense, sparse

    def _normalization_test(self, title, Nx, Ny, D, chi, Zn, dZn=None,
                            backend='numpy', check_normalize=True):
        """Driver for the Zn-symmetric normalization tests: the dense and
        sparse norms must agree; optionally normalize() must bring both
        within TOL of 1."""
        self._banner(title)
        peps = self._random_peps(Nx, Ny, D, chi, Zn=Zn, dZn=dZn,
                                 backend=backend)
        norm0, norm1 = self._norms_both_ways(peps, chi)
        self.assertTrue(abs((norm0 - norm1) / norm1) < self.TOL)
        if check_normalize:
            norm2 = peps.normalize()
            mpiprint(0, 'Symmetric Dense Norm (After normalized) = {}'.format(norm2))
            norm3 = peps.make_sparse().calc_norm(chi=chi)
            mpiprint(0, 'Symmetric Sparse Norm (After normalized) = {}'.format(norm3))
            self.assertTrue(abs(1.0 - norm2) < self.TOL)
            self.assertTrue(abs(1.0 - norm3) < self.TOL)
        self._passed()

    def _plain_normalization_test(self, title, backend):
        """Driver for non-symmetric tests: normalize at construction,
        then check the computed norm is within TOL of 1."""
        self._banner(title)
        chi = 10
        peps = self._random_peps(5, 5, 6, chi, backend=backend,
                                 normalize=True)
        norm = peps.calc_norm(chi=chi)
        mpiprint(0, 'Norm = {}'.format(norm))
        self.assertTrue(abs(1.0 - norm) < self.TOL)
        self._passed()

    def _invariance_test(self, title, first_op, second_op, Nx, Ny, D, chi,
                         backend='numpy'):
        """Driver for the plain rotate/flip tests: the norm must be
        invariant under *first_op* and exactly restored by *second_op*."""
        self._banner(title)
        peps = self._random_peps(Nx, Ny, 3, chi, D=D, backend=backend) \
            if False else self._random_peps(Nx, Ny, D, chi, backend=backend)
        norm0 = peps.calc_norm()
        first_op(peps)
        norm1 = peps.calc_norm()
        second_op(peps)
        norm2 = peps.calc_norm()
        mpiprint(0, 'Norms = {},{},{}'.format(norm0, norm1, norm2))
        self.assertTrue(abs((norm0 - norm1) / norm0) < 1e-5)
        self.assertTrue(abs((norm0 - norm2) / norm0) < 1e-10)
        self._passed()

    def _symmetric_invariance_test(self, title, transform, verb,
                                   backend='numpy'):
        """Driver for the Z2 rotate/flip tests: dense and sparse norms
        agree both before and after *transform*, and the transform leaves
        the norm unchanged (within TOL)."""
        self._banner(title)
        chi = 10
        peps = self._random_peps(5, 5, 6, chi, Zn=2, backend=backend)
        norm0, norm1 = self._norms_both_ways(peps, chi)
        transform(peps)
        norm2, norm3 = self._norms_both_ways(peps, chi, prefix=verb + ' ')
        self.assertTrue(abs((norm0 - norm1) / norm1) < self.TOL)
        self.assertTrue(abs((norm0 - norm2) / norm2) < self.TOL)
        self.assertTrue(abs((norm0 - norm3) / norm3) < self.TOL)
        self._passed()

    # ------------------------------------------------------------------
    # numpy backend
    # ------------------------------------------------------------------
    def test_normalization_Z3(self):
        self._normalization_test(
            'Peps (5x5) Normalization test with Z3 Symmetry',
            5, 5, 6, 50, Zn=3, dZn=2, check_normalize=False)

    def test_normalization_large_Z2(self):
        self._normalization_test(
            'Peps (10x10) Normalization test with Z2 Symmetry',
            10, 10, 6, 10, Zn=2)

    def test_normalization_Z2(self):
        self._normalization_test(
            'Peps (5x5) Normalization test with Z2 Symmetry',
            5, 5, 6, 10, Zn=2)

    def test_normalization(self):
        self._plain_normalization_test(
            'Peps (5x5) Normalization test without Symmetry', 'numpy')

    def test_rotate(self):
        self._invariance_test(
            'Peps Rotation test',
            lambda p: p.rotate(),
            lambda p: p.rotate(clockwise=False),
            3, 3, 3, 100)

    def test_flip(self):
        self._invariance_test(
            'Peps Flipping test',
            lambda p: p.flip(),
            lambda p: p.flip(),
            5, 5, 3, 10)

    def test_rotate_Z2(self):
        self._symmetric_invariance_test(
            'Peps Z2 Rotation test',
            lambda p: p.rotate(clockwise=False), 'Rotated')

    def test_flip_Z2(self):
        self._symmetric_invariance_test(
            'Peps Z2 Flipping test',
            lambda p: p.flip(), 'Flipped')

    # ------------------------------------------------------------------
    # CTF backend
    # ------------------------------------------------------------------
    def test_normalization_Z3_ctf(self):
        self._normalization_test(
            'Peps (5x5) Normalization test with Z3 Symmetry (ctf)',
            5, 5, 6, 50, Zn=3, dZn=2, backend='ctf', check_normalize=False)

    def test_normalization_large_Z2_ctf(self):
        self._normalization_test(
            'Peps (10x10) Normalization test with Z2 Symmetry (ctf)',
            10, 10, 6, 10, Zn=2, backend='ctf')

    def test_normalization_Z2_ctf(self):
        self._normalization_test(
            'Peps (5x5) Normalization test with Z2 Symmetry (ctf)',
            5, 5, 6, 10, Zn=2, backend='ctf')

    def test_normalization_ctf(self):
        self._plain_normalization_test(
            'Peps (5x5) Normalization test without Symmetry (ctf)', 'ctf')

    def test_rotate_ctf(self):
        self._invariance_test(
            'Peps Rotation test (ctf)',
            lambda p: p.rotate(),
            lambda p: p.rotate(clockwise=False),
            3, 3, 3, 100, backend='ctf')

    def test_flip_ctf(self):
        self._invariance_test(
            'Peps Flipping test (ctf)',
            lambda p: p.flip(),
            lambda p: p.flip(),
            5, 5, 3, 10, backend='ctf')

    def test_rotate_Z2_ctf(self):
        # Typo fixed: the original banner read 'Peps Z2 Rotation tes (ctf)t'.
        self._symmetric_invariance_test(
            'Peps Z2 Rotation test (ctf)',
            lambda p: p.rotate(clockwise=False), 'Rotated', backend='ctf')

    def test_flip_Z2_ctf(self):
        self._symmetric_invariance_test(
            'Peps Z2 Flipping test (ctf)',
            lambda p: p.flip(), 'Flipped', backend='ctf')
if __name__ == "__main__":
    # Allow running this test module directly (e.g. `python test_peps.py`).
    unittest.main()
| 36.289242
| 99
| 0.516816
| 2,510
| 20,576
| 4.16255
| 0.036255
| 0.074081
| 0.068338
| 0.053599
| 0.983633
| 0.983633
| 0.983633
| 0.983059
| 0.971286
| 0.963821
| 0
| 0.053451
| 0.350797
| 20,576
| 566
| 100
| 36.353357
| 0.728702
| 0.06527
| 0
| 0.932
| 0
| 0
| 0.132722
| 0
| 0.036
| 0
| 0
| 0
| 0.084
| 1
| 0.036
| false
| 0.036
| 0.042
| 0
| 0.08
| 0.172
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5468be63f7fb61bb688112c2096603ac198ba39f
| 152,476
|
py
|
Python
|
src/python/plotting.py
|
dpopchev/STT_theories
|
c7b49c8f3ff443156c275c93a9e8b497df0f8293
|
[
"MIT"
] | null | null | null |
src/python/plotting.py
|
dpopchev/STT_theories
|
c7b49c8f3ff443156c275c93a9e8b497df0f8293
|
[
"MIT"
] | null | null | null |
src/python/plotting.py
|
dpopchev/STT_theories
|
c7b49c8f3ff443156c275c93a9e8b497df0f8293
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import os
import glob
import shutil
import itertools
import pathlib
import random
import numpy as np
from matplotlib import pyplot as plt
from matplotlib.lines import Line2D
from IPython import get_ipython
from matplotlib import style
from matplotlib.gridspec import GridSpec
from numpy.polynomial.polynomial import polyfit
#~ get_ipython().run_line_magic("matplotlib", "qt")
#~ import matplotlib
#~ matplotlib.use('Qt5Agg')
class plot_result:
def __init__(self):
self.my_ResPath = None
self.my_EOSname = None
self.my_fname_starts = None
self_my_file = None
self.my_headline = None
self.my_data = None
self.my_label = None
self.kalin_path = None
self.kalin_file = None
self.kalin_headline = None
self.kalin_data = None
self.kalin_label = None
self.kalin_mapping = {
"rho_c": 0,
"AR": 1,
"M": 2,
"J": 3,
"phiScal_c": 4,
"p_c": 5
}
self.units = self._units_coef_clac()
self.specific_ms = None
self.specific_ls = None
self.specific_c = None
return
def _luminosity_color(self, color, amount=0.5):
"""
Lightens the given color by multiplying (1-luminosity) by the given amount.
Input can be matplotlib color string, hex string, or RGB tuple.
Examples:
>> lighten_color('g', 0.3)
>> lighten_color('#F034A3', 0.6)
>> lighten_color((.3,.55,.1), 0.5)
"""
import matplotlib.colors as mc
import colorsys
try:
c = mc.cnames[color]
except:
c = color
c = colorsys.rgb_to_hls(*mc.to_rgb(c))
return colorsys.hls_to_rgb(abs(c[0]), abs(1 - amount * (1 - c[1])), abs(c[2]))
def set_my_ResPath(
self,
my_ResPath = "~/projects/STT_theories/results/"
):
"""
path to the results of the shootings
"""
self.my_ResPath = os.path.expanduser(my_ResPath)
return
def set_severalEOSs_ms_ls_c(
self, severalEOSs, m_lambda_style = { "ls": "lambda", "c": "m" }
):
"""
for consistent marking on all graphs for the current instance
provide which will be the marker for m and lambda
EOS is always the marker style with dictionary
{"ls": "lambda", "c": "m"} will map linestyle to lambda and color to m
EXAMPLE INPUT
severalEOSs = [
{ "name": "SLy4", "beta": 0, "m": 0, "lambda": 0 },
{ "name": "APR4", "beta": 0, "m": 0, "lambda": 0 },
{ "name": "FPS", "beta": 0, "m": 0, "lambda": 0 },
{ "name": "WFF2", "beta": 0, "m": 0, "lambda": 0 }
]
"""
self.specific_ms = None
self.specific_ms = self._get_specific_ms( list(
set( [ _["name"] for _ in severalEOSs ] )
), "name" )
self.specific_ls = None
self.specific_ls = self._get_specific_ls( list(
set( [ _[m_lambda_style["ls"]] for _ in severalEOSs ] )
), m_lambda_style["ls"] )
self.specific_c = None
self.specific_c = self._get_specific_c( list(
set( [ _[m_lambda_style["c"]] for _ in severalEOSs ] )
), m_lambda_style["c"] )
return
def get_my_ResPath(self):
return self.my_ResPath
def set_my_EOSname(self, EOS_name):
"""
set the name of EOS to be plotted, as it is a directory in my_ResPath
"""
self.my_EOSname = EOS_name
return
def get_my_EOSname(self):
return self.my_EOSname
def get_my_latests_res(self, fname = "STT_phiScal_J_"):
"""
return the latest result file in my_ResPath
"""
try:
return max(
glob.glob(os.path.join(self.my_ResPath, fname + "*")),
key = os.path.getctime
)
except ValueError:
return None
def move_my_latest_res(self):
"""
move the latest result file from my_ResPath to the EOS_model name dir
"""
full_latest_res = self.get_my_latests_res()
if full_latest_res:
latest_result = os.path.basename(full_latest_res)
else:
print("\n No latest result at \n\t {} \n".format(
self.my_ResPath
)
)
return
src = os.path.join( self.my_ResPath, latest_result )
dst = os.path.join( self.my_ResPath, self.my_EOSname, latest_result )
print(
"\n moving \n\t from {} \n\t to {} \n".format(
src,
dst
)
)
shutil.move(src, dst)
return
def get_my_latests_res_file(self, fname = "STT_phiScal_J_"):
"""
return the latest result file in my_ResPath for current EOS
"""
try:
return max(
glob.glob(os.path.join(
self.my_ResPath, fname + "*"
) ),
key = os.path.getctime
)
except ValueError:
return None
def plot_latest_resEOSname_severalEOSs(self, severalEOSs):
"""
get the latest result from the root folder and plot it alongside
severalEOSs, which again is list of dicsts in the expected form
"""
label, headline, data = self.get_resEOSname_data(
self.get_my_latests_res_file()
)
fig, all_axes = self._get_figure(2,2, self._4by4_grid_placement)
ax_M_AR = all_axes[0]
ax_J_M = all_axes[1]
ax_phiScal_c_p_c = all_axes[2]
ax_rho_c_p_c = all_axes[3]
self._set_parms( ax_M_AR, "AR", "M" )
self._set_parms( ax_J_M, "M", "J" )
self._set_parms( ax_phiScal_c_p_c, "$p_c$", "$\\varphi_c$" )
self._set_parms( ax_rho_c_p_c, "$p_c$", "$\\rho_c$" )
ax_M_AR.plot(
data[3],
data[2],
linewidth=0,
linestyle="",
marker = "o",
markersize = 3
)
ax_J_M.plot(
data[2],
data[5],
linewidth=0,
linestyle="",
marker = "o",
markersize = 3
)
ax_phiScal_c_p_c.plot(
data[0],
data[1],
linewidth=0,
linestyle="",
marker = "o",
markersize = 3
)
ax_rho_c_p_c.plot(
data[0],
data[4],
linewidth=0,
linestyle="",
marker = "o",
markersize = 3
)
val_EOSname, val_beta, val_m, val_lambda = self._get_parameter_values(label)
plt.suptitle(
"EOS = {}; beta = {:.1f}; m = {:.1e}; lambda = {:.1e}".format(
val_EOSname, val_beta, val_m, val_lambda
),
fontsize=10, y=0.998
)
all_label, all_headline, all_data = self.get_severalEOS_data(severalEOSs)
for label, data, eos in zip( all_label, all_data, severalEOSs ):
ls, lc, ms, mc = self._get_ls_lc_ms_mc()
ax_M_AR.plot(
data[3],
data[2],
label = "{}"
"\n\t $\\beta$ = {:.1f}"
"\n\t m = {:.1e}"
"\n\t $\\lambda$ = {:.1e}".format(
eos["name"], eos["beta"], eos["m"], eos["lambda"]
),
color = lc,
linestyle = ls,
marker = ms,
markerfacecolor = mc,
markeredgecolor = mc,
markersize = 2.5,
linewidth = 1.5,
markevery = self._get_markevry(data[3]),
alpha = 0.5
)
ax_J_M.plot(
data[2],
data[5],
label = "{}"
"\n\t $\\beta$ = {:.1f}"
"\n\t m = {:.1e}"
"\n\t $\\lambda$ = {:.1e}".format(
eos["name"], eos["beta"], eos["m"], eos["lambda"]
),
color = lc,
linestyle = ls,
marker = ms,
markerfacecolor = mc,
markeredgecolor = mc,
markersize = 2.5,
linewidth = 1.5,
markevery = self._get_markevry(data[3]),
alpha = 0.5
)
ax_phiScal_c_p_c.plot(
data[0],
data[1],
label = "{}"
"\n\t $\\beta$ = {:.1f}"
"\n\t m = {:.1e}"
"\n\t $\\lambda$ = {:.1e}".format(
eos["name"], eos["beta"], eos["m"], eos["lambda"]
),
color = lc,
linestyle = ls,
marker = ms,
markerfacecolor = mc,
markeredgecolor = mc,
markersize = 2.5,
linewidth = 1.5,
markevery = self._get_markevry(data[3]),
alpha = 0.5
)
ax_rho_c_p_c.plot(
data[0],
data[4],
label = "{}"
"\n\t $\\beta$ = {:.1f}"
"\n\t m = {:.1e}"
"\n\t $\\lambda$ = {:.1e}".format(
eos["name"], eos["beta"], eos["m"], eos["lambda"]
),
color = lc,
linestyle = ls,
marker = ms,
markerfacecolor = mc,
markeredgecolor = mc,
markersize = 2.5,
linewidth = 1.5,
markevery = self._get_markevry(data[3]),
alpha = 0.5
)
ax_rho_c_p_c.legend(
loc="best",
fontsize=8,
handlelength=3.2,
numpoints=1,
fancybox=True,
markerscale = 1.5
)
plt.show()
return
def get_resEOSname_data(self, fpath):
"""
get the following data form fpath, which is the full path to file
label - the name of the file as string
headline - the name for each column, as a list
data - the data itself as a list, each sublist is different column
"""
with open(fpath, "r") as f:
all_data = f.readlines()
label = os.path.basename(fpath)
headline = [
_.strip() for _ in all_data.pop(0).strip().split(" ")
if
"#" not in _ and
len(_.strip())
]
data = [
[] for _ in all_data[0].strip().split(" ") if len(_.strip())
]
for line in all_data:
for d, n in zip(
data, [ float(_) for _ in line.strip().split(" ") if len(_.strip()) ]
):
d.append(n)
if abs(data[1][-1]) > 1e-5 and data[1][-1] > 0:
data[1][-1] *= (-1)
return label, headline, data
def get_severalEOS_data(
self,
severalEOSs,
fname = "STT_phiScal_J"
):
"""
for provided list of dictionaries called severalEOSs get the data
the dict has following structure
{
"name": EOSname_string,
"beta": Value_Beta,
"m": Value_M,
"lambda": Value_lambda
}
"""
all_label = []
all_headline = []
all_data = []
for eos in severalEOSs:
EOSname = "_".join( [
fname,
eos["name"],
"beta{:.3e}".format(eos["beta"]),
"m{:.3e}".format(eos["m"]),
"lambda{:.3e}".format(eos["lambda"])
] )
EOSpath = os.path.join( self.my_ResPath, eos["name"], EOSname)
_label, _headline, _data = self.get_resEOSname_data(EOSpath)
all_label.append(_label)
all_headline.append(_headline)
all_data.append(_data)
return all_label, all_headline, all_data
def get_uniEOSname_data_uniI(self, fpath):
"""
for provided filepaths to tilde I return the entries as nested list
labels and headline
"""
with open(fpath, "r") as f:
all_data = f.readlines()
label = os.path.basename(fpath)
headline = [
_.strip() for _ in all_data.pop(0).strip().split(" ")
if
"#" not in _ and
len(_.strip())
]
data = [
[] for _ in all_data[0].strip().split(" ") if len(_.strip())
]
for line in all_data:
for d, n in zip(
data, [ float(_) for _ in line.strip().split(" ") if len(_.strip()) ]
):
d.append(n)
return label, headline, data
def get_severalEOS_uniTildeI_data(
self,
severalEOSs,
fname = "STT_phiScal_J",
append = ""
):
"""
for provided list of dictionaries called severalEOSs get the data for
universal I, which are in the "Fitting" directory of each EOS
one of the is *_tildeI for tilde I
other is *_barI for bar I
for each entyr in severalEOS return two nested lists barI and tildeI
{
"name": EOSname_string,
"beta": Value_Beta,
"m": Value_M,
"lambda": Value_lambda
}
"""
all_label = []
all_headline = []
all_data = []
for eos in severalEOSs:
EOSname_tildeI = "_".join( [
fname,
eos["name"],
"beta{:.3e}".format(eos["beta"]),
"m{:.3e}".format(eos["m"]),
"lambda{:.3e}".format(eos["lambda"]),
"tildeI",
append
] ) if append else "_".join( [
fname,
eos["name"],
"beta{:.3e}".format(eos["beta"]),
"m{:.3e}".format(eos["m"]),
"lambda{:.3e}".format(eos["lambda"]),
"tildeI"
] )
EOSpath_tildeI = os.path.join(
self.my_ResPath, eos["name"], "Fitting", EOSname_tildeI
)
_label, _headline, _data = self.get_uniEOSname_data_uniI(EOSpath_tildeI)
all_label.append(_label)
all_headline.append(_headline)
all_data.append(_data)
return all_label, all_headline, all_data
def get_severalEOS_uniBarI_data(
    self,
    severalEOSs,
    fname = "STT_phiScal_J",
    append = ""
):
    """
    Collect the universal bar-I data for each EOS in <severalEOSs>.

    The files live in the "Fitting" directory of each EOS and are named
    <fname>_<name>_beta..._m..._lambda..._barI, optionally suffixed with
    <append>. Each entry of severalEOSs is a dictionary
    {
        "name": EOSname_string,
        "beta": Value_Beta,
        "m": Value_M,
        "lambda": Value_lambda
    }
    Returns three lists (labels, headlines, data) aligned with severalEOSs.
    """
    all_label = []
    all_headline = []
    all_data = []
    for eos in severalEOSs:
        #~ build the file name parts; the optional suffix goes last
        name_parts = [
            fname,
            eos["name"],
            "beta{:.3e}".format(eos["beta"]),
            "m{:.3e}".format(eos["m"]),
            "lambda{:.3e}".format(eos["lambda"]),
            "barI"
        ]
        if append:
            name_parts.append(append)
        #~ NOTE: the original local was (misleadingly) named EOSpath_tildeI
        EOSpath_barI = os.path.join(
            self.my_ResPath, eos["name"], "Fitting", "_".join(name_parts)
        )
        _label, _headline, _data = self.get_uniEOSname_data_uniI(EOSpath_barI)
        all_label.append(_label)
        all_headline.append(_headline)
        all_data.append(_data)
    return all_label, all_headline, all_data
def plot_severalEOSs_MvsR(self, severalEOSs):
    """
    Plot mass versus radius for every EOS in <severalEOSs>; see
    get_severalEOS_data for the dictionary format.

    EXAMPLE INPUT
    severalEOSs = [
        { "name": "SLy4", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "APR4", "beta": 0, "m": 0, "lambda": 0 }
    ]
    """
    all_label, all_headline, all_data = self.get_severalEOS_data(severalEOSs)
    fig, all_axes = self._get_figure(1, 1, self._1by1_grid_placement)
    ax = all_axes[0]
    self._set_parms(ax, "R [km]", "$M/M_{\odot}$")
    markers, colors, linestyles = self._get_MSs_Cs_LSs(severalEOSs)
    for curve, eos in zip(all_data, severalEOSs):
        #~ column 3 is the radius (converted to km), column 2 the mass
        curve[3] = [ radius*self.units["R"] for radius in curve[3] ]
        style_kw = self._get_plot_keywords(
            markers, colors, linestyles,
            { "name": eos["name"], "m": eos["m"], "lambda": eos["lambda"] }
        )
        ax.plot(
            curve[3], curve[2],
            label = None,
            linewidth = 1.5,
            markersize = 5.5,
            markevery = self._get_markevry(curve[3], curve[2]),
            **style_kw
        )
    legend_groups = self._get_lines_MSs_Cs_LSs(markers, colors, linestyles)
    ax.legend(
        handles = [ h for group in legend_groups for h in group ],
        loc = "best",
        fontsize = 8,
        handlelength = 2,
        numpoints = 1,
        fancybox = True,
        markerscale = 1,
        ncol = 3,
        frameon = False,
        mode = None
    )
    plt.show()
    return
def plot_severalEOSs_MvsR_costume(self, severalEOSs):
    """
    Plot mass versus radius for every EOS in <severalEOSs> using one fixed
    colour ("#f58231") per curve and only varying the marker, then save the
    figure to 'MvsR_costume.eps'.

    see get_severalEOS_data for the dictionary format
    EXAMPLE INPUT
    severalEOSs = [
        { "name": "SLy4", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "APR4", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "FPS", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "WFF2", "beta": 0, "m": 0, "lambda": 0 }
    ]
    """
    #~ NOTE: a dead inner generator (_get_color) that shuffled a colour
    #~ palette was removed here — it was defined but never called
    all_label, all_headline, all_data = self.get_severalEOS_data(severalEOSs)
    fig, all_axes = self._get_figure(1,1,self._1by1_grid_placement)
    ax = all_axes[0]
    self._set_parms(ax, "R [km]", "$M/M_{\odot}$")
    markers, colors, linestyles = self._get_MSs_Cs_LSs(severalEOSs)
    for label, data, eos in zip( all_label, all_data, severalEOSs ):
        #~ convert the radius column (index 3) to km
        data[3] = [ _*self.units["R"] for _ in data[3] ]
        #~ only the marker is taken from the per-EOS keywords; the colour
        #~ is forced to the single "costume" colour below
        _tmp = self._get_plot_keywords(
            markers, colors, linestyles,
            {
                "name": eos["name"],
                "m": eos["m"],
                "lambda": eos["lambda"]
            }
        )
        ax.plot(
            data[3],
            data[2],
            label = None,
            linewidth = 2,
            markersize = 10,
            markevery = self._get_markevry(data[3], data[2]),
            color = "#f58231",
            linestyle = "-",
            marker = _tmp["marker"],
            markerfacecolor = "#f58231",
            markeredgecolor = "#f58231"
        )
    lines_markers, lines_colors, lines_linestyles = self._get_lines_MSs_Cs_LSs(
        markers, colors, linestyles
    )
    #~ ax.fill([9.8, 15.3, 15.3, 9.8], [1.3, 1.3, 1.5, 1.5], fill=False, hatch='//')
    #~ ax.fill([8.8, 15.3, 15.3, 8.8], [1.9, 1.9, 2.1, 2.1], fill=False, hatch='\\')
    ax.set_xlim(7.5,15.5)
    ax.set_ylim(0.4,3)
    #~ only the marker legend is shown — colour/linestyle carry no
    #~ information in this costume plot
    ax.add_artist( ax.legend(
        handles = [*lines_markers],
        loc="best",
        fontsize=10,
        handlelength=2,
        numpoints=1,
        fancybox=True,
        markerscale = 1.5,
        ncol = 2,
        frameon = False,
        mode = None
    ) )
    plt.savefig(
        'MvsR_costume.eps', format="eps",
        bbox_inches='tight',
        dpi=200,
        pad_inches=0
    )
    plt.show()
    return
def plot_severalEOSs_MvsR_stable(self, severalEOSs):
    """
    plot several EOSs by listing them in <severalEOSs> with dictionaries
    see get_severalEOS_data for the format

    Only the "stable" part of each M(R) curve is plotted: the models from
    the first one with mass >= 0.5 (solar masses, presumably — confirm the
    unit of column 2) up to the maximum-mass model, with a small index
    offset applied on both ends.

    EXAMPLE INPUT
    severalEOSs = [
        { "name": "SLy4", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "APR4", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "FPS", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "WFF2", "beta": 0, "m": 0, "lambda": 0 }
    ]
    """
    all_label, all_headline, all_data = self.get_severalEOS_data(severalEOSs)
    fig, all_axes = self._get_figure(1,1,self._1by1_grid_placement)
    ax = all_axes[0]
    self._set_parms(ax, "R [km]", "$M/M_{\odot}$")
    markers, colors, linestyles = self._get_MSs_Cs_LSs(severalEOSs)
    for label, data, eos in zip( all_label, all_data, severalEOSs ):
        #~ we are interested only in stable modules - part of the graphs
        #~ where the mass is increasing
        #~ after the maximum mass we can cut them off
        #~ the min mass we are interested is the one which is at least 0.5 M sun
        min_mass = 0.5
        little_offset = 2
        #~ index of the maximum-mass model; data[2] is the mass column
        max_m_i = data[2].index(max(data[2]))
        #~ extend a couple of points past the maximum when it stays in range
        max_m_i = max_m_i + little_offset \
            if max_m_i + little_offset < len(data[2]) - 1 else max_m_i
        #~ first index whose mass reaches min_mass; .index(True) raises
        #~ ValueError if no model below max_m_i reaches 0.5 — assumed not
        #~ to happen for these data sets
        min_m_i = list(
            map(lambda _: _ >= min_mass, data[2][:max_m_i])
        ).index(True)
        #~ back up a couple of points when that keeps the index positive
        min_m_i = min_m_i - little_offset \
            if min_m_i - little_offset > 0 else min_m_i
        #~ data[3] is the radius column, converted to km
        stable_R = [ _*self.units["R"] for _ in data[3][min_m_i:max_m_i] ]
        ax.plot(
            stable_R,
            data[2][min_m_i:max_m_i],
            label = None,
            linewidth = 1.5,
            markersize = 5.5,
            markevery = self._get_markevry(stable_R, data[2][min_m_i:max_m_i]),
            **self._get_plot_keywords(
                markers, colors, linestyles,
                {
                    "name": eos["name"],
                    "m": eos["m"],
                    "lambda": eos["lambda"]
                }
            )
        )
    lines_markers, lines_colors, lines_linestyles = self._get_lines_MSs_Cs_LSs(
        markers, colors, linestyles
    )
    ax.legend(
        handles = [*lines_markers, *lines_colors, *lines_linestyles],
        loc="best",
        fontsize=8,
        handlelength=2,
        numpoints=1,
        fancybox=True,
        markerscale = 1,
        ncol = 3,
        frameon = False,
        mode = None
    )
    plt.show()
    return
def plot_severalEOSs_MvsR_GR(self, severalEOSs):
    """
    plot several EOSs by listing them in <severalEOSs> with dictionaries
    see get_severalEOS_data for the format

    Each STT curve is plotted with its own style; on top of them the GR
    (beta = m = lambda = 0) curve of every distinct EOS name is plotted in
    a single fixed colour. The figure is saved to 'MvsR_STT_GR.eps'.

    EXAMPLE INPUT
    severalEOSs = [
        { "name": "SLy4", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "APR4", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "FPS", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "WFF2", "beta": 0, "m": 0, "lambda": 0 }
    ]
    """
    all_label, all_headline, all_data = self.get_severalEOS_data(severalEOSs)
    #~ GR reference data: one entry per distinct EOS name, all STT
    #~ parameters set to zero
    all_label_GR, all_headline_GR, all_data_GR = self.get_severalEOS_data( [
        { "name": _, "beta": 0, "m": 0, "lambda": 0 }
        for _ in set( [ _["name"] for _ in severalEOSs ] )
        ]
    )
    fig, all_axes = self._get_figure(1,1,self._1by1_grid_placement)
    ax = all_axes[0]
    self._set_parms(ax, "R [km]", "$M/M_{\odot}$")
    markers, colors, linestyles = self._get_MSs_Cs_LSs(severalEOSs)
    for label, data, eos in zip( all_label, all_data, severalEOSs ):
        #~ we are interested only in stable modules - part of the graphs
        #~ where the mass is increasing
        #~ after the maximum mass we can cut them off
        #~ the min mass we are interested is the one which is at least 0.5 M sun
        min_mass = 0.5
        little_offset = 2
        max_m_i = data[2].index(max(data[2]))
        max_m_i = max_m_i + little_offset \
            if max_m_i + little_offset < len(data[2]) - 1 else max_m_i
        min_m_i = list(
            map(lambda _: _ >= min_mass, data[2][:max_m_i])
        ).index(True)
        min_m_i = min_m_i - little_offset \
            if min_m_i - little_offset > 0 else min_m_i
        #~ NOTE(review): despite the comment above, the STT curves are only
        #~ cut at the low-mass end — the slice is [min_m_i:], max_m_i is
        #~ computed but not applied here; confirm this is intentional
        stable_R = [ _*self.units["R"] for _ in data[3][min_m_i:] ]
        ax.plot(
            stable_R,
            data[2][min_m_i:],
            label = None,
            linewidth = 2,
            markersize = 10,
            markevery = self._get_markevry(stable_R, data[2][min_m_i:]),
            **self._get_plot_keywords(
                markers, colors, linestyles,
                {
                    "name": eos["name"],
                    "m": eos["m"],
                    "lambda": eos["lambda"]
                }
            )
        )
    #~ GR_color_markers = "#ef4026"
    GR_color_markers = "#f58231"
    #~ GR_color_markers = "#a9f971"
    #~ GR_color_fit = "#ed0dd9"
    GR_color_fit = GR_color_markers
    #~ NOTE(review): this zip relies on the set comprehension iterating in
    #~ the same order as the one used for the get_severalEOS_data call
    #~ above — true within one run for identically-built sets, but fragile
    for label, data, eos in zip(
        all_label_GR,
        all_data_GR,
        [ _ for _ in set( [ _["name"] for _ in severalEOSs ] ) ]
    ):
        #~ we are interested only in stable modules - part of the graphs
        #~ where the mass is increasing
        #~ after the maximum mass we can cut them off
        #~ the min mass we are interested is the one which is at least 0.5 M sun
        min_mass = 0.5
        little_offset = 2
        #~ max_m_i = data[2].index(max(data[2]))
        #~ for GR the cut is at the LAST model: index of the first
        #~ occurrence of the final mass value (equals len-1 when masses
        #~ are unique)
        max_m_i = data[2].index(data[2][-1])
        max_m_i = max_m_i + little_offset \
            if max_m_i + little_offset < len(data[2]) - 1 else max_m_i
        min_m_i = list(
            map(lambda _: _ >= min_mass, data[2][:max_m_i])
        ).index(True)
        min_m_i = min_m_i - little_offset \
            if min_m_i - little_offset > 0 else min_m_i
        stable_R = [ _*self.units["R"] for _ in data[3][min_m_i:max_m_i] ]
        ax.plot(
            stable_R,
            data[2][min_m_i:max_m_i],
            label = None,
            linewidth = 2,
            markersize = 10,
            markevery = self._get_markevry(
                stable_R,
                data[2][min_m_i:max_m_i]
            ),
            #~ eos is an EOS-name string here; assumes markers is keyed by
            #~ name — TODO confirm against _get_MSs_Cs_LSs
            marker = markers.get(eos, None),
            color = GR_color_markers,
            markerfacecolor = GR_color_markers,
            markeredgecolor = GR_color_markers,
        )
    lines_markers, lines_colors, lines_linestyles = self._get_lines_MSs_Cs_LSs(
        markers, colors, linestyles, severalEOSs
    )
    #~ ax.fill([9.8, 15.3, 15.3, 9.8], [1.3, 1.3, 1.5, 1.5], fill=False, hatch='//')
    #~ ax.fill([8.8, 15.3, 15.3, 8.8], [1.9, 1.9, 2.1, 2.1], fill=False, hatch='\\')
    ax.set_xlim(7.5,15.5)
    ax.set_ylim(0.4,3)
    #~ two separate legends: styles/colours (+ the GR line) and markers;
    #~ add_artist keeps the first legend alive when the second is created
    ax.add_artist( ax.legend(
        handles = [
            *lines_linestyles, *lines_colors,
            Line2D(
                [0], [0], color = GR_color_fit, marker = None, linestyle
                = "-", linewidth = 1.5, label = "GR"
            )
        ],
        loc="upper left",
        fontsize=10,
        handlelength=3,
        numpoints=1,
        fancybox=True,
        markerscale = 1.5,
        ncol = 1,
        frameon = False,
        mode = None
    ) )
    ax.add_artist( ax.legend(
        handles = [
            *lines_markers,
        ],
        loc="upper center",
        fontsize=10,
        handlelength=3,
        numpoints=1,
        fancybox=True,
        markerscale = 1.5,
        ncol = 1,
        frameon = False,
        mode = None
    ) )
    plt.savefig(
        'MvsR_STT_GR.eps', format="eps",
        bbox_inches='tight',
        dpi=200,
        pad_inches=0
    )
    plt.show()
    return
def plot_severalEOSs_MvsR_GR_Ifunc(self, severalEOSs):
    """
    plot several EOSs by listing them in <severalEOSs> with dictionaries
    see get_severalEOS_data for the format

    Variant of plot_severalEOSs_MvsR_GR without the fixed axis limits.
    NOTE(review): it saves to the SAME file name 'MvsR_STT_GR.eps' as
    plot_severalEOSs_MvsR_GR and will overwrite its output — confirm
    whether a distinct name was intended.

    EXAMPLE INPUT
    severalEOSs = [
        { "name": "SLy4", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "APR4", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "FPS", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "WFF2", "beta": 0, "m": 0, "lambda": 0 }
    ]
    """
    all_label, all_headline, all_data = self.get_severalEOS_data(severalEOSs)
    #~ GR reference data: one entry per distinct EOS name, all STT
    #~ parameters set to zero
    all_label_GR, all_headline_GR, all_data_GR = self.get_severalEOS_data( [
        { "name": _, "beta": 0, "m": 0, "lambda": 0 }
        for _ in set( [ _["name"] for _ in severalEOSs ] )
        ]
    )
    fig, all_axes = self._get_figure(1,1,self._1by1_grid_placement)
    ax = all_axes[0]
    self._set_parms(ax, "R [km]", "$M/M_{\odot}$")
    markers, colors, linestyles = self._get_MSs_Cs_LSs(severalEOSs)
    for label, data, eos in zip( all_label, all_data, severalEOSs ):
        #~ we are interested only in stable modules - part of the graphs
        #~ where the mass is increasing
        #~ after the maximum mass we can cut them off
        #~ the min mass we are interested is the one which is at least 0.5 M sun
        min_mass = 0.5
        little_offset = 2
        max_m_i = data[2].index(max(data[2]))
        max_m_i = max_m_i + little_offset \
            if max_m_i + little_offset < len(data[2]) - 1 else max_m_i
        min_m_i = list(
            map(lambda _: _ >= min_mass, data[2][:max_m_i])
        ).index(True)
        min_m_i = min_m_i - little_offset \
            if min_m_i - little_offset > 0 else min_m_i
        #~ NOTE(review): as in plot_severalEOSs_MvsR_GR, the STT curves are
        #~ only cut at the low-mass end ([min_m_i:]) — confirm intent
        stable_R = [ _*self.units["R"] for _ in data[3][min_m_i:] ]
        ax.plot(
            stable_R,
            data[2][min_m_i:],
            label = None,
            linewidth = 2,
            markersize = 10,
            markevery = self._get_markevry(stable_R, data[2][min_m_i:]),
            **self._get_plot_keywords(
                markers, colors, linestyles,
                {
                    "name": eos["name"],
                    "m": eos["m"],
                    "lambda": eos["lambda"]
                }
            )
        )
    #~ GR_color_markers = "#ef4026"
    GR_color_markers = "#f58231"
    #~ GR_color_markers = "#a9f971"
    #~ GR_color_fit = "#ed0dd9"
    GR_color_fit = GR_color_markers
    #~ NOTE(review): same set-ordering assumption as in
    #~ plot_severalEOSs_MvsR_GR — the zip relies on both set comprehensions
    #~ iterating in the same order
    for label, data, eos in zip(
        all_label_GR,
        all_data_GR,
        [ _ for _ in set( [ _["name"] for _ in severalEOSs ] ) ]
    ):
        #~ we are interested only in stable modules - part of the graphs
        #~ where the mass is increasing
        #~ after the maximum mass we can cut them off
        #~ the min mass we are interested is the one which is at least 0.5 M sun
        min_mass = 0.5
        little_offset = 2
        #~ max_m_i = data[2].index(max(data[2]))
        #~ for GR the cut is at the LAST model (first occurrence of the
        #~ final mass value)
        max_m_i = data[2].index(data[2][-1])
        max_m_i = max_m_i + little_offset \
            if max_m_i + little_offset < len(data[2]) - 1 else max_m_i
        min_m_i = list(
            map(lambda _: _ >= min_mass, data[2][:max_m_i])
        ).index(True)
        min_m_i = min_m_i - little_offset \
            if min_m_i - little_offset > 0 else min_m_i
        stable_R = [ _*self.units["R"] for _ in data[3][min_m_i:max_m_i] ]
        ax.plot(
            stable_R,
            data[2][min_m_i:max_m_i],
            label = None,
            linewidth = 2,
            markersize = 10,
            markevery = self._get_markevry(
                stable_R,
                data[2][min_m_i:max_m_i]
            ),
            #~ eos is an EOS-name string here; assumes markers is keyed by
            #~ name — TODO confirm against _get_MSs_Cs_LSs
            marker = markers.get(eos, None),
            color = GR_color_markers,
            markerfacecolor = GR_color_markers,
            markeredgecolor = GR_color_markers,
        )
    lines_markers, lines_colors, lines_linestyles = self._get_lines_MSs_Cs_LSs(
        markers, colors, linestyles, severalEOSs
    )
    #~ ax.set_xlim(9,15)
    #~ ax.set_ylim(0.5,3.2)
    #~ two separate legends: styles/colours (+ the GR line) and markers
    ax.add_artist( ax.legend(
        handles = [
            *lines_linestyles, *lines_colors,
            Line2D(
                [0], [0], color = GR_color_fit, marker = None, linestyle
                = "-", linewidth = 1.5, label = "GR"
            )
        ],
        loc="upper left",
        fontsize=10,
        handlelength=3,
        numpoints=1,
        fancybox=True,
        markerscale = 1.5,
        ncol = 1,
        frameon = False,
        mode = None
    ) )
    ax.add_artist( ax.legend(
        handles = [
            *lines_markers,
        ],
        loc="upper center",
        fontsize=10,
        handlelength=3,
        numpoints=1,
        fancybox=True,
        markerscale = 1.5,
        ncol = 1,
        frameon = False,
        mode = None
    ) )
    plt.savefig(
        'MvsR_STT_GR.eps', format="eps",
        bbox_inches='tight',
        dpi=200,
        pad_inches=0
    )
    plt.show()
    return
def plot_severalEOSs_phiScal_cVSp_c(self, severalEOSs):
    """
    Plot the central scalar field versus the central pressure for every
    EOS in <severalEOSs>; see get_severalEOS_data for the dictionary
    format.

    EXAMPLE INPUT
    severalEOSs = [
        { "name": "SLy4", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "APR4", "beta": 0, "m": 0, "lambda": 0 }
    ]
    """
    all_label, all_headline, all_data = self.get_severalEOS_data(severalEOSs)
    fig, all_axes = self._get_figure(1, 1, self._1by1_grid_placement)
    ax = all_axes[0]
    self._set_parms(ax, "$p_c$", "$\\varphi_c$")
    markers, colors, linestyles = self._get_MSs_Cs_LSs(severalEOSs)
    for curve, eos in zip(all_data, severalEOSs):
        #~ column 0: central pressure, column 1: central scalar field
        style_kw = self._get_plot_keywords(
            markers, colors, linestyles,
            { "name": eos["name"], "m": eos["m"], "lambda": eos["lambda"] }
        )
        ax.plot(
            curve[0], curve[1],
            label = None,
            linewidth = 1.5,
            markersize = 5.5,
            markevery = self._get_markevry(curve[0], curve[1]),
            **style_kw
        )
    legend_groups = self._get_lines_MSs_Cs_LSs(markers, colors, linestyles)
    ax.legend(
        handles = [ h for group in legend_groups for h in group ],
        loc = "best",
        fontsize = 8,
        handlelength = 2,
        numpoints = 1,
        fancybox = True,
        markerscale = 1,
        ncol = 3,
        frameon = False,
        mode = None
    )
    plt.show()
    return
def plot_severalEOSs_phiScal_cvsrho_c(self, severalEOSs):
    """
    Plot the central scalar field versus the central density (in g/cm^3)
    for every EOS in <severalEOSs>; see get_severalEOS_data for the
    dictionary format.

    EXAMPLE INPUT
    severalEOSs = [
        { "name": "SLy4", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "APR4", "beta": 0, "m": 0, "lambda": 0 }
    ]
    """
    all_label, all_headline, all_data = self.get_severalEOS_data(severalEOSs)
    fig, all_axes = self._get_figure(1, 1, self._1by1_grid_placement)
    ax = all_axes[0]
    self._set_parms(ax, "$\\rho_c [g/cm^3]$", "$\\varphi_c$")
    markers, colors, linestyles = self._get_MSs_Cs_LSs(severalEOSs)
    for curve, eos in zip(all_data, severalEOSs):
        #~ column -2 is the central density, converted to CGS units
        curve[-2] = [ rho*self.units["density"] for rho in curve[-2] ]
        style_kw = self._get_plot_keywords(
            markers, colors, linestyles,
            { "name": eos["name"], "m": eos["m"], "lambda": eos["lambda"] }
        )
        ax.plot(
            curve[-2], curve[1],
            label = None,
            linewidth = 1.5,
            markersize = 5.5,
            markevery = self._get_markevry(curve[-2], curve[1]),
            **style_kw
        )
    legend_groups = self._get_lines_MSs_Cs_LSs(markers, colors, linestyles)
    ax.legend(
        handles = [ h for group in legend_groups for h in group ],
        loc = "best",
        fontsize = 8,
        handlelength = 2,
        numpoints = 1,
        fancybox = True,
        markerscale = 1,
        ncol = 3,
        frameon = False,
        mode = None
    )
    plt.show()
    return
def plot_severalEOSs_Mvsrho_c(self, severalEOSs):
    """
    Plot the mass versus the central density (in g/cm^3) for every EOS
    in <severalEOSs>; see get_severalEOS_data for the dictionary format.

    EXAMPLE INPUT
    severalEOSs = [
        { "name": "SLy4", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "APR4", "beta": 0, "m": 0, "lambda": 0 }
    ]
    """
    all_label, all_headline, all_data = self.get_severalEOS_data(severalEOSs)
    fig, all_axes = self._get_figure(1, 1, self._1by1_grid_placement)
    ax = all_axes[0]
    self._set_parms(ax, "$\\rho_c [g/cm^3]$", "$M/M_{\odot}$")
    markers, colors, linestyles = self._get_MSs_Cs_LSs(severalEOSs)
    for curve, eos in zip(all_data, severalEOSs):
        #~ column -2: central density (converted to CGS), column 2: mass
        curve[-2] = [ rho*self.units["density"] for rho in curve[-2] ]
        style_kw = self._get_plot_keywords(
            markers, colors, linestyles,
            { "name": eos["name"], "m": eos["m"], "lambda": eos["lambda"] }
        )
        ax.plot(
            curve[-2], curve[2],
            label = None,
            linewidth = 1.5,
            markersize = 5.5,
            markevery = self._get_markevry(curve[-2], curve[2]),
            **style_kw
        )
    legend_groups = self._get_lines_MSs_Cs_LSs(markers, colors, linestyles)
    ax.legend(
        handles = [ h for group in legend_groups for h in group ],
        loc = "best",
        fontsize = 8,
        handlelength = 2,
        numpoints = 1,
        fancybox = True,
        markerscale = 1,
        ncol = 3,
        frameon = False,
        mode = None
    )
    plt.show()
    return
def plot_severalEOSs_JvsM(self, severalEOSs):
    """
    Plot the angular momentum (scaled by 10^45 g cm^3) versus the mass
    for every EOS in <severalEOSs>; see get_severalEOS_data for the
    dictionary format.

    EXAMPLE INPUT
    severalEOSs = [
        { "name": "SLy4", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "APR4", "beta": 0, "m": 0, "lambda": 0 }
    ]
    """
    all_label, all_headline, all_data = self.get_severalEOS_data(severalEOSs)
    fig, all_axes = self._get_figure(1, 1, self._1by1_grid_placement)
    ax = all_axes[0]
    self._set_parms(ax, "$M/M_{\odot}$", "$J 10^{45} [g cm^3]$")
    markers, colors, linestyles = self._get_MSs_Cs_LSs(severalEOSs)
    for curve, eos in zip(all_data, severalEOSs):
        #~ column -1 is J; the 1e-45 factor matches the scale pulled out
        #~ into the axis label
        curve[-1] = [ J*self.units["J"]*1e-45 for J in curve[-1] ]
        style_kw = self._get_plot_keywords(
            markers, colors, linestyles,
            { "name": eos["name"], "m": eos["m"], "lambda": eos["lambda"] }
        )
        ax.plot(
            curve[2], curve[-1],
            label = None,
            linewidth = 1.5,
            markersize = 5.5,
            markevery = self._get_markevry(curve[2], curve[-1]),
            **style_kw
        )
    legend_groups = self._get_lines_MSs_Cs_LSs(markers, colors, linestyles)
    ax.legend(
        handles = [ h for group in legend_groups for h in group ],
        loc = "best",
        fontsize = 8,
        handlelength = 2,
        numpoints = 1,
        fancybox = True,
        markerscale = 1,
        ncol = 3,
        frameon = False,
        mode = None
    )
    ax.set_xlim(0.9, 2.75)
    ax.set_ylim(0.5, 3.25)
    plt.show()
    return
def convert_to_fitting(self, severalEOSs, fname = "STT_phiScal_J"):
    """
    for the provided list of dics of EOSs go over their results and create, by
    appending [name of result]_tildeI and [name of result]_barI, the
    neaceassery ceofficients for the fitting
    IT WILL OVERWRITE EXISTING !!!!

    The converted files are written into the "Fitting" subdirectory of
    each EOS directory (created if missing). Columns used from the source
    file: tmp[2] = mass, tmp[3] = radius, tmp[5] = moment of inertia —
    presumably; confirm against the result-file headline.

    EXAMPLE
    severalEOSs = [
        { "name": "SLy4", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "APR4", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "FPS", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "WFF2", "beta": 0, "m": 0, "lambda": 0 }
    ]
    """
    for eos in severalEOSs:
        #~ full path to the source result file for this EOS/parameter set
        result = os.path.join(
            self.my_ResPath,
            eos["name"],
            "_".join( [
                fname,
                eos["name"],
                "beta{:.3e}".format(eos["beta"]),
                "m{:.3e}".format(eos["m"]),
                "lambda{:.3e}".format(eos["lambda"])
            ] )
        )
        with open(result, "r") as f:
            src_data_lines = f.readlines()
        #~ first line is just headline
        src_data_lines.pop(0)
        current_convert = "_tildeI"
        target_path = os.path.join(
            self.my_ResPath, eos["name"], "Fitting"
        )
        #~ make sure the Fitting directory exists
        pathlib.Path( target_path ).mkdir(parents=True, exist_ok=True)
        target = os.path.join(
            target_path,
            os.path.basename(result) + current_convert
        )
        print(
            "\n will convert \n\t from {} \n\t to {} \n\t as {}".format(
                result, target, current_convert
            )
        )
        #~ tilde I file: compactness M/R against I/(M R^2)
        with open( target, "w" ) as f:
            f.write("# M/R I/(MR**2) \n")
            for line in src_data_lines:
                if not line.strip():
                    continue
                tmp = [
                    float(_) for _ in line.strip().split(" ") if len(_.strip())
                ]
                f.write(
                    "{:.6e} {:.6e} \n".format(
                        tmp[2]/tmp[3], tmp[5]/(tmp[2]*tmp[3]**2)
                    )
                )
        current_convert = "_barI"
        target = os.path.join(
            target_path,
            os.path.basename(result) + current_convert
        )
        print(
            "\n will convert \n\t from {} \n\t to {} \n\t as {}".format(
                result, target, current_convert
            )
        )
        #~ bar I file: first column against I/M^3
        #~ NOTE(review): the headline says (M/R)**-1 but the value written
        #~ is tmp[2]/tmp[3] = M/R, same as in the tilde file — confirm
        #~ whether the headline or the value is wrong
        with open( target, "w" ) as f:
            f.write("# (M/R)**-1 I/M**3 \n")
            for line in src_data_lines:
                if not line.strip():
                    continue
                tmp = [
                    float(_) for _ in line.strip().split(" ") if len(_.strip())
                ]
                f.write(
                    "{:.6e} {:.6e} \n".format(
                        tmp[2]/tmp[3], tmp[5]/(tmp[2]**3)
                    )
                )
    return
def convert_to_fitting_stable(self, severalEOSs, fname = "STT_phiScal_J"):
    """
    for the provided list of dics of EOSs go over their results and create, by
    appending [name of result]_tildeI and [name of result]_barI, the
    neaceassery ceofficients for the fitting
    IT WILL OVERWRITE EXISTING !!!!
    Since I want to convert only the stable models I will get only those
    entries in the data which have increasing Mass,
    Strictly speaking - those entries up to the line with maximum mass

    Output files carry the "_stable" suffix and go into the "Fitting"
    subdirectory of each EOS (created if missing).

    EXAMPLE
    severalEOSs = [
        { "name": "SLy4", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "APR4", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "FPS", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "WFF2", "beta": 0, "m": 0, "lambda": 0 }
    ]
    """
    def _get_max_min_M_i(src_data_lines):
        """
        load all netries of column 2 - the mass
        find the line with maximum mass and use only those
        since we want only stable models

        Returns (max_m_i, min_m_i): the slice bounds around the stable
        branch — from the first mass >= min_mass to the maximum-mass
        line, each padded by little_offset where the index stays valid.
        """
        #~ NOTE(review): this 0.5 duplicates the outer min_M constant,
        #~ which is only used for the printed message — keep in sync
        min_mass = 0.5
        stable_m = []
        for line in src_data_lines:
            stable_m.append(
                [
                    float(_) for _ in line.strip().split(" ") if len(_.strip())
                ][2]
            )
        little_offset = 2
        max_m_i = stable_m.index(max(stable_m))
        max_m_i = max_m_i + little_offset \
            if max_m_i + little_offset < len(stable_m) - 1 else max_m_i
        #~ .index(True) raises ValueError if no mass reaches min_mass —
        #~ assumed not to happen for these data sets
        min_m_i = list(
            map(lambda _: _ >= min_mass, stable_m[:max_m_i])
        ).index(True)
        min_m_i = min_m_i - little_offset \
            if min_m_i - little_offset > 0 else min_m_i
        return max_m_i, min_m_i
    #~ min_M*Mass of Sun
    min_M = 0.5
    #~ NOTE(review): the message says "1 times the mass of Sun" while
    #~ min_M is 0.5 — the printed text looks stale
    print(
        "\n Since the minimum measured mass is 1 times the mass of Sun"
        " will cut out masses smaller than {} \n".format(min_M)
    )
    for eos in severalEOSs:
        #~ full path to the source result file for this EOS/parameter set
        result = os.path.join(
            self.my_ResPath,
            eos["name"],
            "_".join( [
                fname,
                eos["name"],
                "beta{:.3e}".format(eos["beta"]),
                "m{:.3e}".format(eos["m"]),
                "lambda{:.3e}".format(eos["lambda"])
            ] )
        )
        convert_tilde = "_tildeI_stable"
        convert_bar = "_barI_stable"
        convert_dir = "Fitting"
        target_path = os.path.join(
            self.my_ResPath, eos["name"], convert_dir
        )
        pathlib.Path( target_path ).mkdir(parents=True, exist_ok=True)
        #~ open the source and both targets together so all three close
        #~ on any error
        with open(result, "r") as src, \
        open(
            os.path.join( target_path, os.path.basename(result) + convert_tilde ),
            "w"
        ) as dst_tilde, \
        open(
            os.path.join( target_path, os.path.basename(result) + convert_bar ),
            "w"
        ) as dst_bar:
            src_data_lines = src.readlines()
            #~ first line is just headline
            src_data_lines.pop(0)
            max_M_i, min_M_i = _get_max_min_M_i(src_data_lines)
            print(
                "\n Now converting from {}"
                "\n\t EOS {}"
                "\n\t model {}"
                "\n\t to tilde {}"
                "\n\t to bar {}".format(
                    self.my_ResPath,
                    eos["name"],
                    result,
                    dst_tilde.name,
                    dst_bar.name
                )
            )
            dst_tilde.write("# M/R I/(MR**2) \n")
            #~ NOTE(review): as in convert_to_fitting, the bar headline
            #~ says (M/R)**-1 but the written value is tmp[2]/tmp[3] = M/R
            dst_bar.write("# (M/R)**-1 I/M**3 \n")
            #~ only the stable slice of the data is converted; columns:
            #~ tmp[2] = mass, tmp[3] = radius, tmp[5] = moment of inertia
            #~ — presumably; confirm against the result-file headline
            for line in src_data_lines[min_M_i:max_M_i]:
                if not line.strip():
                    continue
                tmp = [
                    float(_) for _ in line.strip().split(" ") if len(_.strip())
                ]
                dst_tilde.write(
                    "{:.6e} {:.6e} \n".format(
                        tmp[2]/tmp[3], tmp[5]/(tmp[2]*tmp[3]**2)
                    )
                )
                dst_bar.write(
                    "{:.6e} {:.6e} \n".format(
                        tmp[2]/tmp[3], tmp[5]/(tmp[2]**3)
                    )
                )
        print(
            "\n Since the minimum measured mass is 1 times the mass of Sun"
            " will cut out masses smaller than {} \n".format(min_M)
        )
    return
def plot_severalEOSs_uniTildeI(self, severalEOSs ):
    """
    Plot the universal tilde-I relationship (I/(MR^2) versus compactness
    M/R) for every EOS in <severalEOSs>; see get_severalEOS_data for the
    dictionary format.

    EXAMPLE INPUT
    severalEOSs = [
        { "name": "SLy4", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "APR4", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "FPS", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "WFF2", "beta": 0, "m": 0, "lambda": 0 }
    ]
    """
    all_label, all_headline, all_data = self.get_severalEOS_uniTildeI_data(severalEOSs)
    fig, all_axes = self._get_figure(1,1,self._1by1_grid_placement)
    ax = all_axes[0]
    self._set_parms(ax, "M/R", "$I/(MR^2)$")
    markers, colors, linestyles = self._get_MSs_Cs_LSs(severalEOSs)
    for label, data, eos in zip( all_label, all_data, severalEOSs ):
        #~ data[0] is the compactness M/R, data[1] is I/(M R^2)
        ax.plot(
            data[0],
            data[1],
            label = None,
            linewidth = 1.5,
            markersize = 5.5,
            markevery = self._get_markevry(data[0], data[1]),
            **self._get_plot_keywords(
                markers, colors, linestyles,
                {
                    "name": eos["name"],
                    "m": eos["m"],
                    "lambda": eos["lambda"]
                }
            )
        )
    lines_markers, lines_colors, lines_linestyles = self._get_lines_MSs_Cs_LSs(
        markers, colors, linestyles
    )
    ax.legend(
        handles = [*lines_markers, *lines_colors, *lines_linestyles],
        loc="best",
        fontsize=8,
        handlelength=2,
        numpoints=1,
        fancybox=True,
        markerscale = 1.25,
        ncol = 3,
        frameon = False,
        mode = None
    )
    #~ BUG FIX: was ax_up.set_xlim(0.09) — ax_up is never defined in this
    #~ method (copy-paste from the polyFit variant) and raised NameError
    ax.set_xlim(0.09)
    plt.show()
    return
def plot_severalEOSs_uniTildeI_polyFit(
    self, severalEOSs, append_stable = "stable"
):
    """
    Plot the universal tilde-I relationship for all EOSs in <severalEOSs>
    together with a single polynomial fit a0 + a1*x + a4*x^4 over the
    pooled data, the fit residuals (lower panel), and shaded bands for the
    average and worst L_inf deviation.

    <severalEOSs> holds dictionaries; see get_severalEOS_data for the
    format. When <append_stable> is non-empty, only the stable data files
    are used and points below a minimal compactness are cut out.

    EXAMPLE INPUT
    severalEOSs = [
        { "name": "SLy4", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "APR4", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "FPS", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "WFF2", "beta": 0, "m": 0, "lambda": 0 }
    ]
    """
    #~ running max/min helpers used to accumulate axis limits
    _get_max = lambda data, _: max(data) if max(data) > _ else _
    _get_min = lambda data, _: min(data) if min(data) < _ else _
    all_label, all_headline, all_data = self.get_severalEOS_uniTildeI_data(
        severalEOSs, append = append_stable
    )
    fig, all_axes = self._get_figure(
        2, 1, self._3by1_shareX_grid_placement, height_ratios = [2,1]
    )
    ax_up = all_axes[0]
    ax_down = all_axes[1]
    self._set_parms(ax_up, "", r"$I/(MR^2)$")
    markers, colors, linestyles = self._get_MSs_Cs_LSs(severalEOSs)
    max_x = max_y = 0
    min_x = min_y = 1e9
    min_compactness = 0.09
    #~ color_fit = "#fcc006"
    color_fit = "#fa4224"
    #~ BUG FIX: these two were commented out although they are used by the
    #~ fill_between calls below, which raised NameError; restored
    color_avg = "#9f2305"
    color_avg_worst = "#dfc5fe"
    for label, data, eos in zip( all_label, all_data, severalEOSs ):
        #~ we set mimimal compactenss threshold and cut out all entries
        #~ who are below it only if we are interested in stable solutions
        if append_stable:
            _min_x = list(
                map(lambda _: _ >= min_compactness, data[0])
            ).index(True)
            data[0] = [ _ for _ in data[0][_min_x:] ]
            data[1] = [ _ for _ in data[1][_min_x:] ]
        max_x = _get_max(data[0], max_x)
        min_x = _get_min(data[0], min_x)
        max_y = _get_max(data[1], max_y)
        min_y = _get_min(data[1], min_y)
        ax_up.plot(
            data[0],
            data[1],
            label = None,
            linewidth = 1.5,
            markersize = 5.5,
            markevery = self._get_markevry(data[0], data[1]),
            **self._get_plot_keywords(
                markers, colors, linestyles,
                {
                    "name": eos["name"],
                    "m": eos["m"],
                    "lambda": eos["lambda"]
                }
            )
        )
    #~ single weighted fit over the pooled data of ALL EOSs; only the
    #~ 0th, 1st and 4th powers are fitted
    coef, rest = polyfit(
        x = [ __ for _ in all_data for __ in _[0] ],
        y = [ __ for _ in all_data for __ in _[1] ],
        deg = [ 0, 1, 4 ],
        w = np.sqrt(np.array([ __ for _ in all_data for __ in _[1] ])),
        full = True,
    )
    #~ reduced chi^2: residual sum over (points - fitted parameters)
    chi_red = rest[0][0]/(len([ __ for _ in all_data for __ in _[0] ]) - 3)
    p = lambda x: coef[0] + coef[1]*x + coef[4]*x**4
    p_x = np.linspace(min_x, max_x, 100)
    p_y = [ p(_) for _ in p_x ]
    lines_markers, lines_colors, lines_linestyles = self._get_lines_MSs_Cs_LSs(
        markers, colors, linestyles
    )
    #~ polyfit is on its own
    lines_polyfit = [
        Line2D(
            [0], [0],
            color = color_fit,
            marker = None,
            linewidth = 2,
            linestyle = "-",
            label = "poly fit, $\chi_r^2$ = {:.3e}"
            "\n {:.3f} + {:.3f}x + {:.3f}$x^4$".format(
                chi_red,
                coef[0],
                coef[1],
                coef[4]
            )
        )
    ]
    ax_up.get_shared_x_axes().join(ax_up, ax_down)
    ax_up.set_xticklabels([])
    ax_down.set_yscale("log")
    self._set_parms(ax_down, "M/R", r"$\left| 1 - \tilde I/\tilde I_{fit} \right|$ ")
    max_y_down = 0
    min_y_down = 1e9
    #~ average over all EOSs of all the residuals
    delta_all = 0
    n_all = 0
    #~ average over all EOSs of largest residual
    delta_all_max = 0
    n_all_max = 0
    #~ the largest residual across all EOSs
    delta_max = 0
    for label, data, eos in zip( all_label, all_data, severalEOSs ):
        #~ relative deviation of each data point from the fit
        _data = [
            abs(1 - _/p(__)) for _,__ in zip(data[1], data[0])
        ]
        delta_all += sum(_data)
        n_all += len(_data)
        delta_all_max += max(_data)
        n_all_max += 1
        delta_max = _get_max(_data, delta_max)
        max_y_down = _get_max(_data, max_y_down)
        min_y_down = _get_min(_data, min_y_down)
        ax_down.plot(
            data[0],
            _data,
            label = None,
            linewidth = 0,
            markersize = 5.5,
            markevery = self._get_markevry(data[0], _data),
            **self._get_plot_keywords(
                markers, colors, linestyles,
                {
                    "name": eos["name"],
                    "m": eos["m"],
                    "lambda": eos["lambda"]
                }
            )
        )
    avg_L_1 = delta_all/n_all
    avg_L_inf = delta_all_max/n_all_max
    L_inf_worst = delta_max
    ax_up.plot(
        p_x,
        p_y,
        color = color_fit,
        marker = None,
        linewidth = 2,
        linestyle = "-",
        label = None
    )
    #~ band of the average largest deviation around the fit
    ax_up.fill_between(
        p_x,
        np.array(p_y)*(1 + avg_L_inf),
        np.array(p_y)*(1 - avg_L_inf),
        facecolor=color_avg,
        alpha=0.75
    )
    #~ band of the single worst deviation around the fit
    ax_up.fill_between(
        p_x,
        np.array(p_y)*( 1 + L_inf_worst ),
        np.array(p_y)*( 1 - L_inf_worst ),
        facecolor=color_avg_worst,
        alpha=0.5
    )
    ax_up.legend(
        handles = [
            *lines_markers, *lines_colors, *lines_linestyles, *lines_polyfit
        ],
        loc="best",
        fontsize=8,
        handlelength=2,
        numpoints=1,
        fancybox=True,
        markerscale = 1.25,
        ncol = 3,
        frameon = False,
        mode = None
    )
    ax_up.set_xlim(min_x, max_x)
    ax_up.set_ylim(min_y, max_y)
    ax_down.set_ylim(1e-3, 1e0)
    print(
        "\n All fit information"
        "\n\t $\chi_r^2$ = {:.3e}"
        "\n\t $a_0$ = {:.3e}"
        "\n\t $a_1$ = {:.3e}"
        "\n\t $a_4$ = {:.3e}"
        "\n\t $< L_1 >$ = {:.3e}"
        "\n\t $< L_\inf >$ = {:.3e}"
        "\n\t $ L_\inf $ = {:.3e}\n".format(
            chi_red,
            coef[0],
            coef[1],
            coef[4],
            avg_L_1,
            avg_L_inf,
            L_inf_worst
        )
    )
    #~ sanity check: one max-residual contribution per EOS
    if n_all_max != len(severalEOSs):
        print(
            "\n SOMETHING FISSHY, n_all = {}, len EOSs = {} \n".format(
                n_all_max, len(severalEOSs))
        )
    plt.show()
    return
def plot_severalEOSs_uniTildeI_polyFitAll_GR(
    self, severalEOSs, append_stable = "stable"
):
    """
    Plot the universal tilde-I relationship I/(MR^2) versus compactness M/R
    for several EOSs, together with polynomial fits.

    The upper panel shows the data points for every EOS in <severalEOSs>,
    the GR data (same EOS names, all modification parameters zero), a
    polynomial fit a_0 + a_1*x + a_4*x^4 of the GR data, and one such fit
    per value of the colour-coded parameter. The lower panel shows the
    relative residuals |1 - I/I_fit| on a logarithmic scale.

    <severalEOSs> with dictionaries, see get_severalEOS_data for the format

    append_stable = "stable" is set as default, so only stable solutions
    will be calculated

    EXAMPLE INPUT
    severalEOSs = [
        { "name": "SLy4", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "APR4", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "FPS", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "WFF2", "beta": 0, "m": 0, "lambda": 0 }
    ]
    """

    #~ expand the data and do the polyfit over fixed degrees [ 0, 1, 4 ]
    def _get_polyfit_res(xp, yp):
        #~ flatten the per-EOS lists of lists into plain lists
        xp = [ __ for _ in xp for __ in _ ]
        yp = [ __ for _ in yp for __ in _ ]

        coef, rest = polyfit(
            x = xp, y = yp,
            deg = [ 0, 1, 4 ],
            w = np.sqrt(yp),
            full = True
        )

        #~ calculate the reduced chi square (3 fitted coefficients)
        chi_red = rest[0][0]/(len(xp) - 3)

        #~ the fitted polynomial as a callable
        p = lambda x: coef[0] + coef[1]*x + coef[4]*x**4

        return coef, chi_red, p

    #~ running maximum / minimum helpers
    _get_max = lambda data, _: max(data) if max(data) > _ else _
    _get_min = lambda data, _: min(data) if min(data) < _ else _

    all_label, all_headline, all_data = self.get_severalEOS_uniTildeI_data(
        severalEOSs, append = append_stable
    )

    #~ GR data: the same EOS names, but with all modification parameters zero
    all_label_GR, all_headline_GR, all_data_GR = self.get_severalEOS_uniTildeI_data( [
            { "name": _, "beta": 0, "m": 0, "lambda": 0 }
            for _ in set( [ _["name"] for _ in severalEOSs ] )
        ],
        append = append_stable
    )

    fig, all_axes = self._get_figure(
        2, 1, self._3by1_shareX_grid_placement, height_ratios = [2,1]
    )

    ax_up = all_axes[0]
    ax_down = all_axes[1]

    #~ the two panels share the x axis; only the lower one shows tick labels
    ax_up.get_shared_x_axes().join(ax_up, ax_down)
    ax_up.set_xticklabels([])
    ax_down.set_yscale("log")

    self._set_parms(ax_down, "M/R", r"$\left| 1 - \tilde I/\tilde I_{fit} \right|$ ")
    self._set_parms(ax_up, "", r"$I/(MR^2)$")

    markers, colors, linestyles = self._get_MSs_Cs_LSs(severalEOSs)

    max_x = max_y = 0
    min_x = min_y = 1e9

    #~ entries below this compactness are cut out for stable solutions
    min_compactness = 0.09

    #~ GR_color_markers = "#ef4026"
    GR_color_markers = "#c0022f"
    #~ GR_color_markers = "#a9f971"
    #~ GR_color_fit = "#ed0dd9"
    GR_color_fit = GR_color_markers

    plot_alpha = 0.75

    #~ lets plot severalEOSs on the up plot and eventually cut out data
    for label, data, eos in zip( all_label, all_data, severalEOSs ):

        #~ we set minimal compactness threshold and cut out all entries
        #~ who are below it only if we are interested in stable solutions
        if append_stable:
            _min_x = list(
                map(lambda _: _ >= min_compactness, data[0])
            ).index(True)

            data[0] = [ _ for _ in data[0][_min_x:] ]
            data[1] = [ _ for _ in data[1][_min_x:] ]

        max_x = _get_max(data[0], max_x)
        min_x = _get_min(data[0], min_x)
        max_y = _get_max(data[1], max_y)
        min_y = _get_min(data[1], min_y)

        ax_up.plot(
            data[0],
            data[1],
            label = None,
            linewidth = 0,
            markersize = 5.5,
            markevery = self._get_markevry(data[0], data[1]),
            **self._get_plot_keywords(
                markers, colors, linestyles,
                {
                    "name": eos["name"],
                    "m": eos["m"],
                    "lambda": eos["lambda"]
                }
            ),
            alpha = plot_alpha
        )

    #~ will use this foreach to fill the polyfit for GR
    polyfit_res = []
    xp = []
    yp = []

    #~ plot all GR data and gather all x and y for evaluating the polyfit
    for label, data, eos in zip(
        all_label_GR,
        all_data_GR,
        [ _ for _ in set( [ _["name"] for _ in severalEOSs ] ) ]
    ):
        #~ we set minimal compactness threshold and cut out all entries
        #~ who are below it only if we are interested in stable solutions
        if append_stable:
            _min_x = list(
                map(lambda _: _ >= min_compactness, data[0])
            ).index(True)

            data[0] = [ _ for _ in data[0][_min_x:] ]
            data[1] = [ _ for _ in data[1][_min_x:] ]

        max_x = _get_max(data[0], max_x)
        min_x = _get_min(data[0], min_x)
        max_y = _get_max(data[1], max_y)
        min_y = _get_min(data[1], min_y)

        ax_up.plot(
            data[0],
            data[1],
            label = None,
            linewidth = 0,
            markersize = 5.5,
            markevery = self._get_markevry(data[0], data[1]),
            marker = markers.get(eos, None),
            color = GR_color_markers,
            markerfacecolor = GR_color_markers,
            markeredgecolor = GR_color_markers,
            alpha = plot_alpha
        )

        xp.append(data[0])
        yp.append(data[1])

    coef, chi_red, p = _get_polyfit_res(xp, yp)

    max_y_down = 0
    min_y_down = 1e9

    #~ average over all EOSs of all the residuals
    delta_all = 0
    n_all = 0

    #~ average over all EOSs of largest residual
    delta_all_max = 0
    n_all_max = 0

    #~ the largest residual across all EOSs
    delta_max = 0

    #~ for the generated polyfit function calculate the
    #~ relative error and plot it down for GR
    #~ (the GR data lists were already trimmed in the loop above)
    for label, data, eos in zip(
        all_label_GR,
        all_data_GR,
        [ _ for _ in set( [ _["name"] for _ in severalEOSs ] ) ]
    ):
        _data = [
            abs(1 - _/p(__)) for _,__ in zip(data[1], data[0])
        ]

        delta_all += sum(_data)
        n_all += len(_data)

        delta_all_max += max(_data)
        n_all_max += 1

        delta_max = _get_max(_data, delta_max)

        max_y_down = _get_max(_data, max_y_down)
        min_y_down = _get_min(_data, min_y_down)

        ax_down.plot(
            data[0],
            _data,
            label = None,
            linewidth = 0,
            markersize = 5.5,
            markevery = self._get_markevry(data[0], _data),
            marker = markers.get(eos, None),
            color = GR_color_markers,
            markerfacecolor = GR_color_markers,
            markeredgecolor = GR_color_markers
        )

    avg_L_1 = delta_all/n_all
    avg_L_inf = delta_all_max/n_all_max
    L_inf_worst = delta_max

    #~ backslashes are doubled so the strings contain literal "\chi"/"\inf"
    #~ without relying on invalid escape sequences (SyntaxWarning on 3.12+)
    print(
        "\n GR fit"
        "\n\t $\\chi_r^2$ = {:.3e}"
        "\n\t $a_0$ = {:.3e}"
        "\n\t $a_1$ = {:.3e}"
        "\n\t $a_4$ = {:.3e}"
        "\n\t $< L_1 >$ = {:.3e}"
        "\n\t $< L_\\inf >$ = {:.3e}"
        "\n\t $ L_\\inf $ = {:.3e}\n".format(
            chi_red,
            coef[0],
            coef[1],
            coef[4],
            avg_L_1,
            avg_L_inf,
            L_inf_worst
        )
    )

    #~ legend proxy for the GR fit line
    lines_polyfit = [
        Line2D(
            [0], [0],
            color = GR_color_fit,
            marker = None,
            linestyle = "-",
            linewidth = 1.5,
            label = "GR fit"
        )
    ]

    p_x = np.linspace(min_x, max_x, 100)
    p_y = [ p(_) for _ in p_x ]

    #~ generate 100 points between min and max of x and plot it values
    ax_up.plot(
        p_x,
        p_y,
        label = None,
        linewidth = 2,
        linestyle = "-",
        markersize = 0,
        markevery = 0,
        marker = None,
        color = GR_color_fit,
    )

    #~ shaded band: average of the per-EOS largest residuals
    ax_up.fill_between(
        p_x,
        np.array(p_y)*(1 + avg_L_inf),
        np.array(p_y)*(1 - avg_L_inf),
        facecolor=GR_color_markers,
        alpha= plot_alpha - 0.25
    )

    #~ shaded band: the worst residual across all EOSs
    ax_up.fill_between(
        p_x,
        np.array(p_y)*( 1 + L_inf_worst ),
        np.array(p_y)*( 1 - L_inf_worst ),
        facecolor=GR_color_markers,
        alpha= plot_alpha - 0.5
    )

    #~ now do the same for each color if there are more than 1
    for k, v in colors.items():

        #~ the colors will have label key containing the name of parameter
        #~ which they represent
        if k == "label":
            continue

        xp = []
        yp = []

        for data, eos in zip(all_data, severalEOSs):

            #~ if the current eos has parameter value equal to the current one
            #~ lets append its data
            if eos[colors["label"]] == k:
                xp.append( data[0] )
                yp.append( data[1] )

        #~ expand all the data into flat list to calculate the polyfit
        coef, chi_red, p = _get_polyfit_res(xp, yp)

        max_y_down = 0
        min_y_down = 1e9

        #~ average over all EOSs of all the residuals
        delta_all = 0
        n_all = 0

        #~ average over all EOSs of largest residual
        delta_all_max = 0
        n_all_max = 0

        #~ the largest residual across all EOSs
        delta_max = 0

        for data, eos in zip(all_data, severalEOSs):

            #~ if the current eos has parameter value equal to the current one
            #~ lets append its data
            if eos[colors["label"]] == k:

                _data = [
                    abs(1 - _/p(__)) for _,__ in zip(data[1], data[0])
                ]

                delta_all += sum(_data)
                n_all += len(_data)

                delta_all_max += max(_data)
                n_all_max += 1

                delta_max = _get_max(_data, delta_max)

                max_y_down = _get_max(_data, max_y_down)
                min_y_down = _get_min(_data, min_y_down)

                ax_down.plot(
                    data[0],
                    _data,
                    label = None,
                    linewidth = 0,
                    markersize = 5.5,
                    markevery = self._get_markevry(data[0], _data),
                    marker = markers.get(eos["name"], None),
                    color = v,
                    markerfacecolor = v,
                    markeredgecolor = v
                )

        avg_L_1 = delta_all/n_all
        avg_L_inf = delta_all_max/n_all_max
        L_inf_worst = delta_max

        print(
            "\n {} {:.3e} fit"
            "\n\t $\\chi_r^2$ = {:.3e}"
            "\n\t $a_0$ = {:.3e}"
            "\n\t $a_1$ = {:.3e}"
            "\n\t $a_4$ = {:.3e}"
            "\n\t $< L_1 >$ = {:.3e}"
            "\n\t $< L_\\inf >$ = {:.3e}"
            "\n\t $ L_\\inf $ = {:.3e}\n".format(
                "$\\lambda =$ " if colors["label"] == "lambda" else "m =",
                k,
                chi_red,
                coef[0],
                coef[1],
                coef[4],
                avg_L_1,
                avg_L_inf,
                L_inf_worst
            )
        )

        #~ legend proxy for this parameter value's fit line
        lines_polyfit.append(
            Line2D(
                [0], [0],
                color = v,
                marker = None,
                linestyle = "-",
                linewidth = 1.5,
                label = "{} {:.3e} fit".format(
                    "$\\lambda =$ " if colors["label"] == "lambda" else "m =",
                    k
                )
            )
        )

        p_x = np.linspace(min_x, max_x, 100)
        p_y = [ p(_) for _ in p_x ]

        ax_up.plot(
            p_x,
            p_y,
            label = None,
            linewidth = 2,
            linestyle = "-",
            markersize = 0,
            markevery = 0,
            marker = None,
            color = v,
        )

        ax_up.fill_between(
            p_x,
            np.array(p_y)*(1 + avg_L_inf),
            np.array(p_y)*(1 - avg_L_inf),
            facecolor=v,
            alpha= plot_alpha - 0.25
        )

        ax_up.fill_between(
            p_x,
            np.array(p_y)*( 1 + L_inf_worst ),
            np.array(p_y)*( 1 - L_inf_worst ),
            facecolor=v,
            alpha= plot_alpha - 0.5
        )

    #####################################################################
    #~ THE CODE BELOW CAN WORK FOR LINSTEYLES DO NOT DELETE
    #####################################################################
    #~ now do the same for each linestyle if there are more than 1
    #for k, v in linestyles.items():
        ##~ the linestyle will have label key containing the name of parameter
        ##~ which they represetn
        #if k == "label":
            #continue
        #xp = []
        #yp = []
        #for data, eos in zip(all_data, severalEOSs):
            ##~ if the current eos has parameter value equal to the current one
            ##~ lets append its data
            #if eos[linestyles["label"]] == k:
                #xp.append( data[0] )
                #yp.append( data[1] )
        ##~ expand all the data into flat list to calculate the polyfit
        #coef, chi_red, p = _get_polyfit_res(xp, yp)
        #max_y_down = 0
        #min_y_down = 1e9
        ##~ average over all EOSs of all the residuals
        #delta_all = 0
        #n_all = 0
        ##~ average over all EOSs of largest residual
        #delta_all_max = 0
        #n_all_max = 0
        ##~ the largest residual across all EOSs
        #delta_max = 0
        #for data, eos in zip(all_data, severalEOSs):
            ##~ if the current eos has parameter value equal to the current one
            ##~ lets append its data
            #if eos[linestyles["label"]] == k:
                #_data = [
                    #abs(1 - _/p(__)) for _,__ in zip(data[1], data[0])
                #]
                #delta_all += sum(_data)
                #n_all += len(_data)
                #delta_all_max += max(_data)
                #n_all_max += 1
                #delta_max = _get_max(_data, delta_max)
                #max_y_down = _get_max(_data, max_y_down)
                #min_y_down = _get_min(_data, min_y_down)
                #ax_down.plot(
                    #data[0],
                    #_data,
                    #label = None,
                    #linewidth = 0,
                    #markersize = 5.5,
                    #markevery = self._get_markevry(data[0], _data),
                    #marker = markers.get(eos["name"], None),
                    #color = "k",
                    #markerfacecolor = "k",
                    #markeredgecolor = "k"
                #)
        #avg_L_1 = delta_all/n_all
        #avg_L_inf = delta_all_max/n_all_max
        #L_inf_worst = delta_max
        #print(
            #"\n {} {:.3e} fit"
            #"\n\t $\chi_r^2$ = {:.3e}"
            #"\n\t $a_0$ = {:.3e}"
            #"\n\t $a_1$ = {:.3e}"
            #"\n\t $a_4$ = {:.3e}"
            #"\n\t $< L_1 >$ = {:.3e}"
            #"\n\t $< L_\inf >$ = {:.3e}"
            #"\n\t $ L_\inf $ = {:.3e}\n".format(
                #"$\\lambda =$ " if linestyles["label"] == "lambda" else "m =",
                #k,
                #chi_red,
                #coef[0],
                #coef[1],
                #coef[4],
                #avg_L_1,
                #avg_L_inf,
                #L_inf_worst
            #)
        #)
        #lines_polyfit.append(
            #Line2D(
                #[0], [0],
                #color = "k",
                #marker = None,
                #linestyle = v,
                #linewidth = 1.5,
                #label = "{} {:.3e} fit".format(
                    #"$\\lambda =$ " if linestyles["label"] == "lambda" else "m =",
                    #k
                #)
            #)
        #)
        #p_x = np.linspace(min_x, max_x, 100)
        #p_y = [ p(_) for _ in p_x ]
        #ax_up.plot(
            #p_x,
            #p_y,
            #label = None,
            #linewidth = 2,
            #linestyle = v,
            #markersize = 0,
            #markevery = 0,
            #marker = None,
            #color = "k",
        #)
        #ax_up.fill_between(
            #p_x,
            #np.array(p_y)*(1 + avg_L_inf),
            #np.array(p_y)*(1 - avg_L_inf),
            #facecolor="k",
            #alpha= plot_alpha - 0.25
        #)
        #ax_up.fill_between(
            #p_x,
            #np.array(p_y)*( 1 + L_inf_worst ),
            #np.array(p_y)*( 1 - L_inf_worst ),
            #facecolor="k",
            #alpha= plot_alpha - 0.5
        #)

    lines_markers, lines_colors, lines_linestyles = self._get_lines_MSs_Cs_LSs(
        markers, colors, linestyles
    )

    ax_up.legend(
        handles = [
            *lines_markers, *lines_colors, *lines_polyfit
        ],
        loc="best",
        fontsize=8,
        handlelength=2.5,
        numpoints=1,
        fancybox=True,
        markerscale = 1.25,
        ncol = 4,
        frameon = False,
        mode = None
    )

    ax_up.set_xlim(min_x, max_x)
    ax_up.set_ylim(min_y, max_y)

    ax_down.set_ylim(1e-3, 1e0)

    plt.show()

    return
def plot_severalEOSs_uniTildeI_polyFitAll_GR_ParmsProduct(
    self, severalEOSs, append_stable = "stable"
):
    """
    Plot the universal tilde-I relationship I/(MR^2) versus compactness M/R
    for several EOSs, with one polynomial fit per (colour, linestyle)
    parameter combination (Cartesian product) plus the GR fit.

    The upper panel shows the data, the GR fit with its residual bands and
    one fit line per parameter combination; the lower panel shows the
    relative residuals |1 - I/I_fit| on a logarithmic scale. The figure is
    also saved as 'uniTilde.eps'.

    <severalEOSs> with dictionaries, see get_severalEOS_data for the format

    append_stable = "stable" is set as default, so only stable solutions
    will be calculated

    EXAMPLE INPUT
    severalEOSs = [
        { "name": "SLy4", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "APR4", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "FPS", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "WFF2", "beta": 0, "m": 0, "lambda": 0 }
    ]
    """

    #~ expand the data and do the polyfit over fixed degrees [ 0, 1, 4 ]
    def _get_polyfit_res(xp, yp):
        #~ flatten the per-EOS lists of lists into plain lists
        xp = [ __ for _ in xp for __ in _ ]
        yp = [ __ for _ in yp for __ in _ ]

        coef, rest = polyfit(
            x = xp, y = yp,
            deg = [ 0, 1, 4 ],
            w = np.sqrt(yp),
            full = True
        )

        #~ calculate the reduced chi square (3 fitted coefficients)
        chi_red = rest[0][0]/(len(xp) - 3)

        p = lambda x: coef[0] + coef[1]*x + coef[4]*x**4

        return coef, chi_red, p

    #~ running maximum / minimum helpers
    _get_max = lambda data, _: max(data) if max(data) > _ else _
    _get_min = lambda data, _: min(data) if min(data) < _ else _

    all_label, all_headline, all_data = self.get_severalEOS_uniTildeI_data(
        severalEOSs, append = append_stable
    )

    #~ GR data: the same EOS names, but with all modification parameters zero
    all_label_GR, all_headline_GR, all_data_GR = self.get_severalEOS_uniTildeI_data( [
            { "name": _, "beta": 0, "m": 0, "lambda": 0 }
            for _ in set( [ _["name"] for _ in severalEOSs ] )
        ],
        append = append_stable
    )

    fig, all_axes = self._get_figure(
        2, 1, self._3by1_shareX_grid_placement, height_ratios = [2,1]
    )

    #~ turn off the tight layout since it does not permit sticking the two axes
    #~ but can do it manually
    fig.subplots_adjust(wspace=0, hspace=0)

    ax_up = all_axes[0]
    ax_down = all_axes[1]

    ax_up.get_shared_x_axes().join(ax_up, ax_down)
    ax_up.set_xticklabels([])
    ax_down.set_yscale("log")

    self._set_parms(
        ax_down,
        "M/R",
        "$\\left| 1 - \\tilde I/\\tilde I_{{fit}} \\right|$"
    )
    self._set_parms(ax_up, "", r"$I/(MR^2)$")

    markers, colors, linestyles = self._get_MSs_Cs_LSs(severalEOSs)

    max_x = max_y = 0
    min_x = min_y = 1e9

    #~ entries below this compactness are cut out for stable solutions
    min_compactness = 0.09

    #~ GR_color_markers = "#ef4026"
    #~ GR_color_markers = "#469990"
    GR_color_markers = "#f58231"
    #~ GR_color_markers = "#a9f971"
    #~ GR_color_fit = "#ed0dd9"
    GR_color_fit = self._luminosity_color(GR_color_markers, 1.1)

    plot_alpha = 0.6

    #~ lets plot severalEOSs on the up plot and eventually cut out data
    for label, data, eos in zip( all_label, all_data, severalEOSs ):

        #~ we set minimal compactness threshold and cut out all entries
        #~ who are below it only if we are interested in stable solutions
        if append_stable:
            _min_x = list(
                map(lambda _: _ >= min_compactness, data[0])
            ).index(True)

            data[0] = [ _ for _ in data[0][_min_x:] ]
            data[1] = [ _ for _ in data[1][_min_x:] ]

        max_x = _get_max(data[0], max_x)
        min_x = _get_min(data[0], min_x)
        max_y = _get_max(data[1], max_y)
        min_y = _get_min(data[1], min_y)

        ax_up.plot(
            data[0],
            data[1],
            label = None,
            linewidth = 0,
            markersize = 5.5,
            markevery = self._get_markevry(data[0], data[1], amount_points=20),
            **self._get_plot_keywords(
                markers, colors, linestyles,
                {
                    "name": eos["name"],
                    "m": eos["m"],
                    "lambda": eos["lambda"]
                }
            ),
            alpha = plot_alpha
        )

    #~ will use this foreach to fill the polyfit for GR
    polyfit_res = []
    xp = []
    yp = []

    #~ plot all GR data and gather all x and y for evaluating the polyfit
    for label, data, eos in zip(
        all_label_GR,
        all_data_GR,
        [ _ for _ in set( [ _["name"] for _ in severalEOSs ] ) ]
    ):
        #~ we set minimal compactness threshold and cut out all entries
        #~ who are below it only if we are interested in stable solutions
        if append_stable:
            _min_x = list(
                map(lambda _: _ >= min_compactness, data[0])
            ).index(True)

            data[0] = [ _ for _ in data[0][_min_x:] ]
            data[1] = [ _ for _ in data[1][_min_x:] ]

        max_x = _get_max(data[0], max_x)
        min_x = _get_min(data[0], min_x)
        max_y = _get_max(data[1], max_y)
        min_y = _get_min(data[1], min_y)

        ax_up.plot(
            data[0],
            data[1],
            label = None,
            linewidth = 0,
            markersize = 5.5,
            markevery = self._get_markevry(data[0], data[1], amount_points=20),
            marker = markers.get(eos, None),
            color = GR_color_markers,
            markerfacecolor = GR_color_markers,
            markeredgecolor = GR_color_markers,
            alpha = plot_alpha
        )

        xp.append(data[0])
        yp.append(data[1])

    coef, chi_red, p = _get_polyfit_res(xp, yp)

    max_y_down = 0
    min_y_down = 1e9

    #~ average over all EOSs of all the residuals
    delta_all = 0
    n_all = 0

    #~ average over all EOSs of largest residual
    delta_all_max = 0
    n_all_max = 0

    #~ the largest residual across all EOSs
    delta_max = 0

    #~ for the generated polyfit function calculate the
    #~ relative error and plot it down for GR
    #~ (the GR data lists were already trimmed in the loop above)
    for label, data, eos in zip(
        all_label_GR,
        all_data_GR,
        [ _ for _ in set( [ _["name"] for _ in severalEOSs ] ) ]
    ):
        _data = [
            abs(1 - _/p(__)) for _,__ in zip(data[1], data[0])
        ]

        delta_all += sum(_data)
        n_all += len(_data)

        delta_all_max += max(_data)
        n_all_max += 1

        delta_max = _get_max(_data, delta_max)

        max_y_down = _get_max(_data, max_y_down)
        min_y_down = _get_min(_data, min_y_down)

        ax_down.plot(
            data[0],
            _data,
            label = None,
            linewidth = 0,
            markersize = 5.5,
            markevery = self._get_markevry(data[0], _data),
            marker = markers.get(eos, None),
            color = GR_color_markers,
            markerfacecolor = GR_color_markers,
            markeredgecolor = GR_color_markers,
            alpha = plot_alpha
        )

    avg_L_1 = delta_all/n_all
    avg_L_inf = delta_all_max/n_all_max
    L_inf_worst = delta_max

    #~ backslashes are doubled so the strings contain literal "\chi"/"\inf"
    #~ without relying on invalid escape sequences (SyntaxWarning on 3.12+)
    print(
        "\n GR fit"
        "\n\t $\\chi_r^2$ = {:.3e}"
        "\n\t $a_0$ = {:.3e}"
        "\n\t $a_1$ = {:.3e}"
        "\n\t $a_4$ = {:.3e}"
        "\n\t $< L_1 >$ = {:.3e}"
        "\n\t $< L_\\inf >$ = {:.3e}"
        "\n\t $ L_\\inf $ = {:.3e}\n".format(
            chi_red,
            coef[0],
            coef[1],
            coef[4],
            avg_L_1,
            avg_L_inf,
            L_inf_worst
        )
    )

    #~ legend proxy for the GR fit line
    lines_polyfit = [
        Line2D(
            [0], [0],
            color = GR_color_fit,
            marker = None,
            linestyle = "-",
            linewidth = 1.5,
            label = "GR fit"
        )
    ]

    p_x = np.linspace(min_x, max_x, 100)
    p_y = [ p(_) for _ in p_x ]

    #~ generate 100 points between min and max of x and plot it values
    ax_up.plot(
        p_x,
        p_y,
        label = None,
        linewidth = 2.5,
        linestyle = "-",
        markersize = 0,
        markevery = 0,
        marker = None,
        color = GR_color_fit,
        zorder = 100
    )

    #~ shaded band: average of the per-EOS largest residuals
    ax_up.fill_between(
        p_x,
        np.array(p_y)*(1 + avg_L_inf),
        np.array(p_y)*(1 - avg_L_inf),
        facecolor=GR_color_markers,
        alpha= plot_alpha - 0.2
    )

    #~ shaded band: the worst residual across all EOSs
    ax_up.fill_between(
        p_x,
        np.array(p_y)*( 1 + L_inf_worst ),
        np.array(p_y)*( 1 - L_inf_worst ),
        facecolor=GR_color_markers,
        alpha= plot_alpha - 0.4
    )

    #~ now do the same for each (color, linestyle) parameter combination
    for c, l in itertools.product(colors.items(), linestyles.items()):

        #~ the colors will have label key containing the name of parameter
        #~ which they represent
        if c[0] == "label" or l[0] == "label":
            continue

        xp = []
        yp = []

        for data, eos in zip(all_data, severalEOSs):

            #~ if the current eos has parameter value equal to the current one
            #~ lets append its data
            if eos[colors["label"]] == c[0] and eos[linestyles["label"]] == l[0]:
                xp.append( data[0] )
                yp.append( data[1] )

        #~ colors and linestyles have all possible combinations of EOSs
        #~ but we may not need all of them but only portion determined
        #~ by severalEOSs, so a quick fix, if no points added we just continue
        if not xp:
            continue

        #~ expand all the data into flat list to calculate the polyfit
        coef, chi_red, p = _get_polyfit_res(xp, yp)

        max_y_down = 0
        min_y_down = 1e9

        #~ average over all EOSs of all the residuals
        delta_all = 0
        n_all = 0

        #~ average over all EOSs of largest residual
        delta_all_max = 0
        n_all_max = 0

        #~ the largest residual across all EOSs
        delta_max = 0

        for data, eos in zip(all_data, severalEOSs):

            #~ if the current eos has parameter value equal to the current one
            #~ lets append its data
            if eos[colors["label"]] == c[0] and eos[linestyles["label"]] == l[0]:

                _data = [
                    abs(1 - _/p(__)) for _,__ in zip(data[1], data[0])
                ]

                delta_all += sum(_data)
                n_all += len(_data)

                delta_all_max += max(_data)
                n_all_max += 1

                delta_max = _get_max(_data, delta_max)

                max_y_down = _get_max(_data, max_y_down)
                min_y_down = _get_min(_data, min_y_down)

                ax_down.plot(
                    data[0],
                    _data,
                    label = None,
                    linewidth = 0,
                    markersize = 5.5,
                    markevery = self._get_markevry(data[0], _data),
                    marker = markers.get(eos["name"], None),
                    color = c[1],
                    markerfacecolor = c[1],
                    markeredgecolor = c[1],
                    alpha = plot_alpha - 0.1
                )

        avg_L_1 = delta_all/n_all
        avg_L_inf = delta_all_max/n_all_max
        L_inf_worst = delta_max

        print(
            "\n lambda = {:.3e}, m = {:.3e} fit"
            "\n\t $\\chi_r^2$ = {:.3e}"
            "\n\t $a_0$ = {:.3e}"
            "\n\t $a_1$ = {:.3e}"
            "\n\t $a_4$ = {:.3e}"
            "\n\t $< L_1 >$ = {:.3e}"
            "\n\t $< L_\\inf >$ = {:.3e}"
            "\n\t $ L_\\inf $ = {:.3e}\n".format(
                l[0] if linestyles["label"] == "lambda" else c[0],
                c[0] if colors["label"] == "m" else l[0],
                chi_red,
                coef[0],
                coef[1],
                coef[4],
                avg_L_1,
                avg_L_inf,
                L_inf_worst
            )
        )

        #~ NOTE: per-combination legend entries are deliberately not added
        #~ (the combined legend would become too crowded)

        p_x = np.linspace(min_x, max_x, 100)
        p_y = [ p(_) for _ in p_x ]

        ax_up.plot(
            p_x,
            p_y,
            label = None,
            linewidth = 2.5,
            linestyle = l[1],
            markersize = 0,
            markevery = 0,
            marker = None,
            color = self._luminosity_color(c[1], 1.1),
            zorder = 90
        )

        #~ NOTE: residual bands for the per-combination fits are deliberately
        #~ not drawn; only the GR fit gets shaded bands

    lines_markers, lines_colors, lines_linestyles = self._get_lines_MSs_Cs_LSs(
        markers, colors, linestyles, severalEOSs
    )

    #~ two separate legends: markers (EOS names) upper-left, the rest
    #~ lower-right; add_artist keeps the first one when the second is created
    ax_up.add_artist( ax_up.legend(
        handles = [
            *lines_markers
        ],
        loc="upper left",
        fontsize=10,
        handlelength=3,
        numpoints=1,
        fancybox=True,
        markerscale = 1.5,
        ncol = 3,
        frameon = False,
        mode = None
    ) )

    ax_up.add_artist( ax_up.legend(
        handles = [
            *lines_colors, *lines_polyfit, *lines_linestyles
        ],
        loc="lower right",
        fontsize=10,
        handlelength=3,
        numpoints=1,
        fancybox=True,
        markerscale = 1.5,
        ncol = 2,
        frameon = False,
        mode = None
    ) )

    ax_up.set_xlim(min_x, max_x)
    ax_up.set_ylim(min_y, max_y)

    ax_down.set_ylim(1e-3, 1.5e0)

    plt.savefig(
        'uniTilde.eps', format="eps",
        bbox_inches='tight',
        dpi=1200,
        pad_inches=0
    )

    plt.show()

    return
def plot_severalEOSs_uniBarI(self, severalEOSs ):
    """
    Plot the universal bar-I relationship I/M^3 versus compactness M/R
    for several EOSs on a single panel.

    <severalEOSs> with dictionaries, see get_severalEOS_data for the format

    EXAMPLE INPUT
    severalEOSs = [
        { "name": "SLy4", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "APR4", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "FPS", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "WFF2", "beta": 0, "m": 0, "lambda": 0 }
    ]
    """

    all_label, all_headline, all_data = self.get_severalEOS_uniBarI_data(severalEOSs)

    fig, all_axes = self._get_figure(1,1,self._1by1_grid_placement)
    ax = all_axes[0]

    self._set_parms(ax, "M/R", "$I/(M^3)$")

    markers, colors, linestyles = self._get_MSs_Cs_LSs(severalEOSs)

    for label, data, eos in zip( all_label, all_data, severalEOSs ):

        ax.plot(
            data[0],
            data[1],
            label = None,
            linewidth = 1.5,
            markersize = 5.5,
            markevery = self._get_markevry(data[0], data[1]),
            **self._get_plot_keywords(
                markers, colors, linestyles,
                {
                    "name": eos["name"],
                    "m": eos["m"],
                    "lambda": eos["lambda"]
                }
            )
        )

    lines_markers, lines_colors, lines_linestyles = self._get_lines_MSs_Cs_LSs(
        markers, colors, linestyles
    )

    ax.legend(
        handles = [*lines_markers, *lines_colors, *lines_linestyles],
        loc="best",
        fontsize=8,
        handlelength=2,
        numpoints=1,
        fancybox=True,
        markerscale = 1.25,
        ncol = 3,
        frameon = False,
        mode = None
    )

    #~ BUGFIX: this previously called set_xlim on the undefined name
    #~ "ax_up" (NameError) -- the only axis in this figure is "ax"
    ax.set_xlim(0.09)

    plt.show()

    return
def plot_severalEOSs_uniBarI_polyFit(
    self, severalEOSs, append_stable = "stable"
):
    """
    Plot the universal bar-I relationship I/M^3 versus compactness M/R for
    several EOSs, together with a single polynomial fit in inverse powers
    a_1/x + a_2/x^2 + a_3/x^3 + a_4/x^4 over all data.

    The upper panel shows the data and the fit with residual bands; the
    lower panel shows the relative residuals |1 - I/I_fit| on a log scale.

    <severalEOSs> with dictionaries, see get_severalEOS_data for the format

    append_stable = "stable" is set as default, so only stable solutions
    will be calculated

    EXAMPLE INPUT
    severalEOSs = [
        { "name": "SLy4", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "APR4", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "FPS", "beta": 0, "m": 0, "lambda": 0 },
        { "name": "WFF2", "beta": 0, "m": 0, "lambda": 0 }
    ]
    """

    #~ running maximum / minimum helpers
    _get_max = lambda data, _: max(data) if max(data) > _ else _
    _get_min = lambda data, _: min(data) if min(data) < _ else _

    all_label, all_headline, all_data = self.get_severalEOS_uniBarI_data(
        severalEOSs, append = append_stable
    )

    fig, all_axes = self._get_figure(
        2, 1, self._3by1_shareX_grid_placement, height_ratios = [2,1]
    )

    ax_up = all_axes[0]
    ax_down = all_axes[1]

    self._set_parms(ax_up, "", "$I/(M^3)$")

    markers, colors, linestyles = self._get_MSs_Cs_LSs(severalEOSs)

    max_x = max_y = 0
    min_x = min_y = 1e9

    #~ entries below this compactness are cut out for stable solutions
    min_compactness = 0.09

    color_fit = "#fcc006"
    color_avg = "#9f2305"
    color_avg_worst = "#dfc5fe"

    for label, data, eos in zip( all_label, all_data, severalEOSs ):

        #~ we set minimal compactness threshold and cut out all entries
        #~ who are below it only if we are interested only in stable solutions
        if append_stable:
            _min_x = list(
                map(lambda _: _ >= min_compactness, data[0])
            ).index(True)

            data[0] = [ _ for _ in data[0][_min_x:] ]
            data[1] = [ _ for _ in data[1][_min_x:] ]

        max_x = _get_max(data[0], max_x)
        min_x = _get_min(data[0], min_x)
        max_y = _get_max(data[1], max_y)
        min_y = _get_min(data[1], min_y)

        ax_up.plot(
            data[0],
            data[1],
            label = None,
            linewidth = 1.5,
            markersize = 5.5,
            markevery = self._get_markevry(data[0], data[1]),
            **self._get_plot_keywords(
                markers, colors, linestyles,
                {
                    "name": eos["name"],
                    "m": eos["m"],
                    "lambda": eos["lambda"]
                }
            )
        )

    #~ fit y against 1/x with inverse powers 1..4 over ALL data at once
    coef, rest = polyfit(
        x = [ __**-1 for _ in all_data for __ in _[0] ],
        y = [ __ for _ in all_data for __ in _[1] ],
        deg = [ 1, 2, 3, 4 ],
        #~ w = np.sqrt(np.array([ __ for _ in all_data for __ in _[1] ])),
        full = True
    )

    #~ reduced chi square (4 fitted coefficients)
    chi_red = rest[0][0]/(len([ __ for _ in all_data for __ in _[0] ]) - 4)

    p = lambda x: \
        coef[1]*(1/x)**1 + coef[2]*(1/x)**2 + coef[3]*(1/x)**3 + coef[4]*(1/x)**4

    p_x = np.linspace(min_x, max_x, 100)
    p_y = [ p(_) for _ in p_x ]

    lines_markers, lines_colors, lines_linestyles = self._get_lines_MSs_Cs_LSs(
        markers, colors, linestyles
    )

    #~ legend proxy for the fit line; backslashes are doubled so the label
    #~ contains literal "\chi" without an invalid escape sequence
    lines_polyfit = [
        Line2D(
            [0], [0],
            color = color_fit,
            marker = None,
            linewidth = 2,
            linestyle = "-",
            label = "poly fit, $\\chi_r^2$ = {:.3e}"
            "\n {:.3f}$(1/x)^1$ + {:.3f}$(1/x)^2$ + {:.3f}$(1/x)^3$ + {:.3f}$(1/x)^4$".format(
                chi_red,
                coef[1],
                coef[2],
                coef[3],
                coef[4]
            )
        )
    ]

    #~ the two panels share the x axis; only the lower one shows tick labels
    ax_up.get_shared_x_axes().join(ax_up, ax_down)
    ax_up.set_xticklabels([])
    ax_down.set_yscale("log")

    self._set_parms(ax_down, "M/R", r"$\left| 1 - \bar I/\bar I_{fit} \right|$")

    max_y_down = 0
    min_y_down = 1e9

    #~ average over all EOSs of all the residuals
    delta_all = 0
    n_all = 0

    #~ average over all EOSs of largest residual
    delta_all_max = 0
    n_all_max = 0

    #~ the largest residual across all EOSs
    delta_max = 0

    for label, data, eos in zip( all_label, all_data, severalEOSs ):

        _data = [
            abs(1 - _/p(__)) for _,__ in zip(data[1], data[0])
        ]

        delta_all += sum(_data)
        n_all += len(_data)

        delta_all_max += max(_data)
        n_all_max += 1

        delta_max = _get_max(_data, delta_max)

        max_y_down = _get_max(_data, max_y_down)
        min_y_down = _get_min(_data, min_y_down)

        ax_down.plot(
            data[0],
            _data,
            label = None,
            linewidth = 0,
            markersize = 5.5,
            markevery = self._get_markevry(data[0], _data),
            **self._get_plot_keywords(
                markers, colors, linestyles,
                {
                    "name": eos["name"],
                    "m": eos["m"],
                    "lambda": eos["lambda"]
                }
            )
        )

    avg_L_1 = delta_all/n_all
    avg_L_inf = delta_all_max/n_all_max
    L_inf_worst = delta_max

    ax_up.plot(
        p_x,
        p_y,
        color = color_fit,
        marker = None,
        linewidth = 2,
        linestyle = "-",
        label = None
    )

    #~ shaded band: the worst residual across all EOSs
    ax_up.fill_between(
        p_x,
        np.array(p_y)*( 1 + L_inf_worst ),
        np.array(p_y)*( 1 - L_inf_worst ),
        facecolor=color_avg_worst,
        alpha=0.5
    )

    #~ shaded band: average of the per-EOS largest residuals
    ax_up.fill_between(
        p_x,
        np.array(p_y)*(1 + avg_L_inf),
        np.array(p_y)*(1 - avg_L_inf),
        facecolor=color_avg,
        alpha=0.5
    )

    ax_up.legend(
        handles = [
            *lines_markers, *lines_colors, *lines_linestyles, *lines_polyfit
        ],
        loc="best",
        fontsize=8,
        handlelength=2,
        numpoints=1,
        fancybox=True,
        markerscale = 1.25,
        ncol = 3,
        frameon = False,
        mode = None
    )

    ax_up.set_xlim(min_x, max_x)
    ax_up.set_ylim(min_y, max_y)

    ax_down.set_ylim(1e-3, 1e0)

    #~ backslashes are doubled so the strings contain literal "\chi"/"\inf"
    #~ without relying on invalid escape sequences (SyntaxWarning on 3.12+)
    print(
        "\n All fit information"
        "\n\t $\\chi_r^2$ = {:.3e}"
        "\n\t $a_1$ = {:.3e}"
        "\n\t $a_2$ = {:.3e}"
        "\n\t $a_3$ = {:.3e}"
        "\n\t $a_4$ = {:.3e}"
        "\n\t $< L_1 >$ = {:.3e}"
        "\n\t $< L_\\inf >$ = {:.3e}"
        "\n\t $ L_\\inf $ = {:.3e}\n".format(
            chi_red,
            coef[1],
            coef[2],
            coef[3],
            coef[4],
            avg_L_1,
            avg_L_inf,
            L_inf_worst
        )
    )

    #~ sanity check: every EOS should have contributed exactly once
    if n_all_max != len(severalEOSs):
        print(
            "\n SOMETHING FISSHY, n_all = {}, len EOSs = {} \n".format(
            n_all_max, len(severalEOSs))
        )

    plt.show()

    return
def plot_severalEOSs_uniBarI_polyFitAll_GR(
    self, severalEOSs, append_stable = "stable"
):
    """
    Plot the universal bar-I relation (I/(M^3) versus compactness M/R)
    for every EOS/parameter set in <severalEOSs>, overlay the pure-GR
    (beta = m = lambda = 0) sequences of the same EOS names, fit the GR
    points with a 4-term polynomial in inverse compactness and display
    the deviations from that fit.

    Figure layout: the upper panel holds the data points, the GR fit
    line and two shaded bands (average and worst L_inf residual); the
    lower, log-scaled panel shows the relative residual
    |1 - bar I / bar I_fit| of each sequence. A separate fit (line,
    bands and residuals) is also produced for each value of the
    parameter the color mapping encodes. Fit coefficients and
    L_1/L_inf statistics are printed to stdout.

    Parameters
    ----------
    severalEOSs : list of dict
        entries of the form
        { "name": "SLy4", "beta": 0, "m": 0, "lambda": 0 };
        see get_severalEOS_data for the full format
    append_stable : str
        "stable" (default) keeps only stable solutions; any truthy
        value also activates the minimal-compactness cut below

    Returns
    -------
    None; the figure is displayed with plt.show()
    """
    #~ flatten the per-EOS data and do the polyfit
    def _get_polyfit_res(xp, yp):
        #~ fit bar I as a polynomial in 1/x (x = M/R) using only the
        #~ degree 1..4 terms (no constant term)
        xp = [ __**-1 for _ in xp for __ in _ ]
        yp = [ __ for _ in yp for __ in _ ]
        coef, rest = polyfit(
            x = xp, y = yp,
            deg = [ 1, 2, 3, 4 ],
            #~ w = np.sqrt(yp),
            full = True
        )
        #~ reduced chi^2 from the returned residual sum
        chi_red = rest[0][0]/(len(xp) - 4)
        #~ coef[0] corresponds to the excluded constant term
        p = lambda x: coef[1]*x**-1 + coef[2]*x**-2 + coef[3]*x**-3 + coef[4]*x**-4
        return coef, chi_red, p
    #~ running max/min helpers: fold a data list into an accumulator
    _get_max = lambda data, _: max(data) if max(data) > _ else _
    _get_min = lambda data, _: min(data) if min(data) < _ else _
    all_label, all_headline, all_data = self.get_severalEOS_uniBarI_data(
        severalEOSs, append = append_stable
    )
    #~ the GR reference data: same EOS names, all parameters set to zero
    all_label_GR, all_headline_GR, all_data_GR = self.get_severalEOS_uniBarI_data( [
            { "name": _, "beta": 0, "m": 0, "lambda": 0 }
            for _ in set( [ _["name"] for _ in severalEOSs ] )
        ],
        append = append_stable
    )
    fig, all_axes = self._get_figure(
        2, 1, self._3by1_shareX_grid_placement, height_ratios = [2,1]
    )
    ax_up = all_axes[0]
    ax_down = all_axes[1]
    #~ the two panels share the abscissa; only the lower shows ticks
    ax_up.get_shared_x_axes().join(ax_up, ax_down)
    ax_up.set_xticklabels([])
    ax_down.set_yscale("log")
    self._set_parms(ax_down, "M/R", r"$\left| 1 - \bar I/\bar I_{fit} \right|$ ")
    self._set_parms(ax_up, "", r"$I/(M^3)$")
    markers, colors, linestyles = self._get_MSs_Cs_LSs(severalEOSs)
    max_x = max_y = 0
    min_x = min_y = 1e9
    #~ points below this compactness are discarded (stable branch only)
    min_compactness = 0.09
    #~ GR_color_markers = "#ef4026"
    GR_color_markers = "#c0022f"
    #~ GR_color_markers = "#a9f971"
    #~ GR_color_fit = "#ed0dd9"
    GR_color_fit = GR_color_markers
    plot_alpha = 0.75
    #~ plot severalEOSs on the upper panel, cutting out low-compactness data
    for label, data, eos in zip( all_label, all_data, severalEOSs ):
        #~ apply the minimal compactness threshold and drop all entries
        #~ below it, only when we are interested in stable solutions
        if append_stable:
            _min_x = list(
                map(lambda _: _ >= min_compactness, data[0])
            ).index(True)
            data[0] = [ _ for _ in data[0][_min_x:] ]
            data[1] = [ _ for _ in data[1][_min_x:] ]
        max_x = _get_max(data[0], max_x)
        min_x = _get_min(data[0], min_x)
        max_y = _get_max(data[1], max_y)
        min_y = _get_min(data[1], min_y)
        ax_up.plot(
            data[0],
            data[1],
            label = None,
            linewidth = 0,
            markersize = 5.5,
            markevery = self._get_markevry(data[0], data[1]),
            **self._get_plot_keywords(
                markers, colors, linestyles,
                {
                    "name": eos["name"],
                    "m": eos["m"],
                    "lambda": eos["lambda"]
                }
            ),
            alpha = plot_alpha
        )
    #~ gather the GR x/y lists here to evaluate the polyfit afterwards
    polyfit_res = []
    xp = []
    yp = []
    #~ plot all GR data and collect all x and y for the polyfit
    for label, data, eos in zip(
        all_label_GR,
        all_data_GR,
        [ _ for _ in set( [ _["name"] for _ in severalEOSs ] ) ]
    ):
        #~ same minimal-compactness cut as above
        if append_stable:
            _min_x = list(
                map(lambda _: _ >= min_compactness, data[0])
            ).index(True)
            data[0] = [ _ for _ in data[0][_min_x:] ]
            data[1] = [ _ for _ in data[1][_min_x:] ]
        max_x = _get_max(data[0], max_x)
        min_x = _get_min(data[0], min_x)
        max_y = _get_max(data[1], max_y)
        min_y = _get_min(data[1], min_y)
        #~ NOTE(review): here <eos> is the EOS *name* string, so
        #~ markers.get(eos, None) looks the name up in the marker dict
        ax_up.plot(
            data[0],
            data[1],
            label = None,
            linewidth = 0,
            markersize = 5.5,
            markevery = self._get_markevry(data[0], data[1]),
            marker = markers.get(eos, None),
            color = GR_color_markers,
            markerfacecolor = GR_color_markers,
            markeredgecolor = GR_color_markers,
            alpha = plot_alpha
        )
        xp.append(data[0])
        yp.append(data[1])
    coef, chi_red, p = _get_polyfit_res(xp, yp)
    max_y_down = 0
    min_y_down = 1e9
    #~ average over all EOSs of all the residuals
    delta_all = 0
    n_all = 0
    #~ average over all EOSs of largest residual
    delta_all_max = 0
    n_all_max = 0
    #~ the largest residual across all EOSs
    delta_max = 0
    #~ for the generated polyfit function calculate the
    #~ relative error and plot it on the lower panel for GR
    for label, data, eos in zip(
        all_label_GR,
        all_data_GR,
        [ _ for _ in set( [ _["name"] for _ in severalEOSs ] ) ]
    ):
        _data = [
            abs(1 - _/p(__)) for _,__ in zip(data[1], data[0])
        ]
        delta_all += sum(_data)
        n_all += len(_data)
        delta_all_max += max(_data)
        n_all_max += 1
        delta_max = _get_max(_data, delta_max)
        max_y_down = _get_max(_data, max_y_down)
        min_y_down = _get_min(_data, min_y_down)
        ax_down.plot(
            data[0],
            _data,
            label = None,
            linewidth = 0,
            markersize = 5.5,
            markevery = self._get_markevry(data[0], _data),
            marker = markers.get(eos, None),
            color = GR_color_markers,
            markerfacecolor = GR_color_markers,
            markeredgecolor = GR_color_markers
        )
    avg_L_1 = delta_all/n_all
    avg_L_inf = delta_all_max/n_all_max
    L_inf_worst = delta_max
    print(
        "\n GR fit"
        "\n\t $\chi_r^2$ = {:.3e}"
        "\n\t $a_1$ = {:.3e}"
        "\n\t $a_2$ = {:.3e}"
        "\n\t $a_3$ = {:.3e}"
        "\n\t $a_4$ = {:.3e}"
        "\n\t $< L_1 >$ = {:.3e}"
        "\n\t $< L_\inf >$ = {:.3e}"
        "\n\t $ L_\inf $ = {:.3e}\n".format(
            chi_red,
            coef[1],
            coef[2],
            coef[3],
            coef[4],
            avg_L_1,
            avg_L_inf,
            L_inf_worst
        )
    )
    #~ add the polyfit line as an entry to be displayed in the legend
    lines_polyfit = [
        Line2D(
            [0], [0],
            color = GR_color_fit,
            marker = None,
            linestyle = "-",
            linewidth = 1.5,
            label = "GR fit"
        )
    ]
    p_x = np.linspace(min_x, max_x, 100)
    p_y = [ p(_) for _ in p_x ]
    #~ generate 100 points between min and max of x and plot the fit
    ax_up.plot(
        p_x,
        p_y,
        label = None,
        linewidth = 2,
        linestyle = "-",
        markersize = 0,
        markevery = 0,
        marker = None,
        color = GR_color_fit,
    )
    #~ shaded band: average largest residual
    ax_up.fill_between(
        p_x,
        np.array(p_y)*(1 + avg_L_inf),
        np.array(p_y)*(1 - avg_L_inf),
        facecolor=GR_color_markers,
        alpha= plot_alpha - 0.25
    )
    #~ shaded band: worst residual
    ax_up.fill_between(
        p_x,
        np.array(p_y)*( 1 + L_inf_worst ),
        np.array(p_y)*( 1 - L_inf_worst ),
        facecolor=GR_color_markers,
        alpha= plot_alpha - 0.5
    )
    #~ now repeat the fit per value of the color-encoded parameter
    for k, v in colors.items():
        #~ the colors dict carries a "label" key naming the parameter
        #~ it encodes; skip that entry
        if k == "label":
            continue
        xp = []
        yp = []
        for data, eos in zip(all_data, severalEOSs):
            #~ collect the data of the EOSs whose parameter equals the
            #~ current value
            if eos[colors["label"]] == k:
                xp.append( data[0] )
                yp.append( data[1] )
        #~ expand all the data into flat lists to calculate the polyfit
        coef, chi_red, p = _get_polyfit_res(xp, yp)
        max_y_down = 0
        min_y_down = 1e9
        #~ average over all EOSs of all the residuals
        delta_all = 0
        n_all = 0
        #~ average over all EOSs of largest residual
        delta_all_max = 0
        n_all_max = 0
        #~ the largest residual across all EOSs
        delta_max = 0
        for data, eos in zip(all_data, severalEOSs):
            #~ same residual bookkeeping as the GR pass above
            if eos[colors["label"]] == k:
                _data = [
                    abs(1 - _/p(__)) for _,__ in zip(data[1], data[0])
                ]
                delta_all += sum(_data)
                n_all += len(_data)
                delta_all_max += max(_data)
                n_all_max += 1
                delta_max = _get_max(_data, delta_max)
                max_y_down = _get_max(_data, max_y_down)
                min_y_down = _get_min(_data, min_y_down)
                ax_down.plot(
                    data[0],
                    _data,
                    label = None,
                    linewidth = 0,
                    markersize = 5.5,
                    markevery = self._get_markevry(data[0], _data),
                    marker = markers.get(eos["name"], None),
                    color = v,
                    markerfacecolor = v,
                    markeredgecolor = v
                )
        avg_L_1 = delta_all/n_all
        avg_L_inf = delta_all_max/n_all_max
        L_inf_worst = delta_max
        print(
            "\n {} {:.3e} fit"
            "\n\t $\chi_r^2$ = {:.3e}"
            "\n\t $a_1$ = {:.3e}"
            "\n\t $a_2$ = {:.3e}"
            "\n\t $a_3$ = {:.3e}"
            "\n\t $a_4$ = {:.3e}"
            "\n\t $< L_1 >$ = {:.3e}"
            "\n\t $< L_\inf >$ = {:.3e}"
            "\n\t $ L_\inf $ = {:.3e}\n".format(
                "$\\lambda =$ " if colors["label"] == "lambda" else "m =",
                k,
                chi_red,
                coef[1],
                coef[2],
                coef[3],
                coef[4],
                avg_L_1,
                avg_L_inf,
                L_inf_worst
            )
        )
        lines_polyfit.append(
            Line2D(
                [0], [0],
                color = v,
                marker = None,
                linestyle = "-",
                linewidth = 1.5,
                label = "{} {:.3e} fit".format(
                    "$\\lambda =$ " if colors["label"] == "lambda" else "m =",
                    k
                )
            )
        )
        p_x = np.linspace(min_x, max_x, 100)
        p_y = [ p(_) for _ in p_x ]
        ax_up.plot(
            p_x,
            p_y,
            label = None,
            linewidth = 2,
            linestyle = "-",
            markersize = 0,
            markevery = 0,
            marker = None,
            color = v,
        )
        ax_up.fill_between(
            p_x,
            np.array(p_y)*(1 + avg_L_inf),
            np.array(p_y)*(1 - avg_L_inf),
            facecolor=v,
            alpha= plot_alpha - 0.25
        )
        ax_up.fill_between(
            p_x,
            np.array(p_y)*( 1 + L_inf_worst ),
            np.array(p_y)*( 1 - L_inf_worst ),
            facecolor=v,
            alpha= plot_alpha - 0.5
        )
    #~ THERE IS ADDITIONAL CODE FOR LINSTYLES WHICH IS NOT TRANSFERED HERE
    lines_markers, lines_colors, lines_linestyles = self._get_lines_MSs_Cs_LSs(
        markers, colors, linestyles
    )
    ax_up.legend(
        handles = [
            *lines_markers, *lines_colors, *lines_polyfit
        ],
        loc="best",
        fontsize=8,
        handlelength=2,
        numpoints=1,
        fancybox=True,
        markerscale = 1.25,
        ncol = 4,
        frameon = False,
        mode = None
    )
    ax_up.set_xlim(min_x, max_x)
    ax_up.set_ylim(min_y, max_y)
    ax_down.set_ylim(1e-3, 1e0)
    plt.show()
    return
def plot_severalEOSs_uniBarI_polyFitAll_GR_ParmsProduct(
    self, severalEOSs, append_stable = "stable"
):
    """
    Variant of plot_severalEOSs_uniBarI_polyFitAll_GR that produces one
    polyfit for every (color value, linestyle value) combination —
    i.e. for each (lambda, m) parameter pair — instead of one per color
    value only.

    Upper panel: data points, the GR fit line with average/worst L_inf
    bands, and one fit line per parameter pair (drawn in a lightened
    color with the pair's linestyle). Lower, log-scaled panel: the
    relative residuals |1 - bar I / bar I_fit|. Fit statistics are
    printed to stdout, two legends are placed on the upper panel and
    the figure is saved to "uniBar.eps" before being shown.

    Parameters
    ----------
    severalEOSs : list of dict
        entries of the form
        { "name": "SLy4", "beta": 0, "m": 0, "lambda": 0 };
        see get_severalEOS_data for the full format
    append_stable : str
        "stable" (default) keeps only stable solutions; any truthy
        value also activates the minimal-compactness cut below

    Returns
    -------
    None; the figure is saved and displayed
    """
    #~ flatten the per-EOS data and do the polyfit
    def _get_polyfit_res(xp, yp):
        #~ fit bar I as a polynomial in 1/x (x = M/R) using only the
        #~ degree 1..4 terms (no constant term)
        xp = [ __**-1 for _ in xp for __ in _ ]
        yp = [ __ for _ in yp for __ in _ ]
        coef, rest = polyfit(
            x = xp, y = yp,
            deg = [ 1, 2, 3, 4 ],
            #~ w = np.sqrt(yp),
            full = True
        )
        #~ reduced chi^2 from the returned residual sum
        chi_red = rest[0][0]/(len(xp) - 4)
        p = lambda x: coef[1]*x**-1 + coef[2]*x**-2 + coef[3]*x**-3 + coef[4]*x**-4
        return coef, chi_red, p
    #~ running max/min helpers: fold a data list into an accumulator
    _get_max = lambda data, _: max(data) if max(data) > _ else _
    _get_min = lambda data, _: min(data) if min(data) < _ else _
    all_label, all_headline, all_data = self.get_severalEOS_uniBarI_data(
        severalEOSs, append = append_stable
    )
    #~ the GR reference data: same EOS names, all parameters set to zero
    all_label_GR, all_headline_GR, all_data_GR = self.get_severalEOS_uniBarI_data( [
            { "name": _, "beta": 0, "m": 0, "lambda": 0 }
            for _ in set( [ _["name"] for _ in severalEOSs ] )
        ],
        append = append_stable
    )
    fig, all_axes = self._get_figure(
        2, 1, self._3by1_shareX_grid_placement, height_ratios = [2,1]
    )
    #~ tight layout is off (see _get_figure), so stick the two axes
    #~ together manually
    fig.subplots_adjust(wspace=0, hspace=0)
    ax_up = all_axes[0]
    ax_down = all_axes[1]
    ax_up.get_shared_x_axes().join(ax_up, ax_down)
    ax_up.set_xticklabels([])
    ax_down.set_yscale("log")
    self._set_parms(ax_down, "M/R", r"$\left| 1 - \bar I/\bar I_{fit} \right|$ ")
    self._set_parms(ax_up, "", r"$I/(M^3)$")
    markers, colors, linestyles = self._get_MSs_Cs_LSs(severalEOSs)
    max_x = max_y = 0
    min_x = min_y = 1e9
    #~ points below this compactness are discarded (stable branch only)
    min_compactness = 0.09
    #~ GR_color_markers = "#ef4026"
    #~ GR_color_markers = "#469990"
    GR_color_markers = "#f58231"
    #~ GR_color_markers = "#a9f971"
    #~ GR_color_fit = "#ed0dd9"
    GR_color_fit = self._luminosity_color(GR_color_markers, 1.1)
    plot_alpha = 0.6
    #~ plot severalEOSs on the upper panel, cutting out low-compactness data
    for label, data, eos in zip( all_label, all_data, severalEOSs ):
        #~ apply the minimal compactness threshold and drop all entries
        #~ below it, only when we are interested in stable solutions
        if append_stable:
            _min_x = list(
                map(lambda _: _ >= min_compactness, data[0])
            ).index(True)
            data[0] = [ _ for _ in data[0][_min_x:] ]
            data[1] = [ _ for _ in data[1][_min_x:] ]
        max_x = _get_max(data[0], max_x)
        min_x = _get_min(data[0], min_x)
        max_y = _get_max(data[1], max_y)
        min_y = _get_min(data[1], min_y)
        ax_up.plot(
            data[0],
            data[1],
            label = None,
            linewidth = 0,
            markersize = 5.5,
            markevery = self._get_markevry(data[0], data[1],amount_points=15),
            **self._get_plot_keywords(
                markers, colors, linestyles,
                {
                    "name": eos["name"],
                    "m": eos["m"],
                    "lambda": eos["lambda"]
                }
            ),
            alpha = plot_alpha
        )
    #~ gather the GR x/y lists here to evaluate the polyfit afterwards
    polyfit_res = []
    xp = []
    yp = []
    #~ plot all GR data and collect all x and y for the polyfit
    for label, data, eos in zip(
        all_label_GR,
        all_data_GR,
        [ _ for _ in set( [ _["name"] for _ in severalEOSs ] ) ]
    ):
        #~ same minimal-compactness cut as above
        if append_stable:
            _min_x = list(
                map(lambda _: _ >= min_compactness, data[0])
            ).index(True)
            data[0] = [ _ for _ in data[0][_min_x:] ]
            data[1] = [ _ for _ in data[1][_min_x:] ]
        max_x = _get_max(data[0], max_x)
        min_x = _get_min(data[0], min_x)
        max_y = _get_max(data[1], max_y)
        min_y = _get_min(data[1], min_y)
        #~ NOTE(review): here <eos> is the EOS *name* string
        ax_up.plot(
            data[0],
            data[1],
            label = None,
            linewidth = 0,
            markersize = 5.5,
            markevery = self._get_markevry(data[0], data[1],amount_points=15),
            marker = markers.get(eos, None),
            color = GR_color_markers,
            markerfacecolor = GR_color_markers,
            markeredgecolor = GR_color_markers,
            alpha = plot_alpha
        )
        xp.append(data[0])
        yp.append(data[1])
    coef, chi_red, p = _get_polyfit_res(xp, yp)
    max_y_down = 0
    min_y_down = 1e9
    #~ average over all EOSs of all the residuals
    delta_all = 0
    n_all = 0
    #~ average over all EOSs of largest residual
    delta_all_max = 0
    n_all_max = 0
    #~ the largest residual across all EOSs
    delta_max = 0
    #~ for the generated polyfit function calculate the
    #~ relative error and plot it on the lower panel for GR
    for label, data, eos in zip(
        all_label_GR,
        all_data_GR,
        [ _ for _ in set( [ _["name"] for _ in severalEOSs ] ) ]
    ):
        _data = [
            abs(1 - _/p(__)) for _,__ in zip(data[1], data[0])
        ]
        delta_all += sum(_data)
        n_all += len(_data)
        delta_all_max += max(_data)
        n_all_max += 1
        delta_max = _get_max(_data, delta_max)
        max_y_down = _get_max(_data, max_y_down)
        min_y_down = _get_min(_data, min_y_down)
        ax_down.plot(
            data[0],
            _data,
            label = None,
            linewidth = 0,
            markersize = 5.5,
            markevery = self._get_markevry(data[0], _data),
            marker = markers.get(eos, None),
            color = GR_color_markers,
            markerfacecolor = GR_color_markers,
            markeredgecolor = GR_color_markers,
            alpha = plot_alpha
        )
    avg_L_1 = delta_all/n_all
    avg_L_inf = delta_all_max/n_all_max
    L_inf_worst = delta_max
    print(
        "\n GR fit"
        "\n\t $\chi_r^2$ = {:.3e}"
        "\n\t $a_1$ = {:.3e}"
        "\n\t $a_2$ = {:.3e}"
        "\n\t $a_3$ = {:.3e}"
        "\n\t $a_4$ = {:.3e}"
        "\n\t $< L_1 >$ = {:.3e}"
        "\n\t $< L_\inf >$ = {:.3e}"
        "\n\t $ L_\inf $ = {:.3e}\n".format(
            chi_red,
            coef[1],
            coef[2],
            coef[3],
            coef[4],
            avg_L_1,
            avg_L_inf,
            L_inf_worst
        )
    )
    #~ legend handle for the GR fit line
    lines_polyfit = [
        Line2D(
            [0], [0],
            color = GR_color_fit,
            marker = None,
            linestyle = "-",
            linewidth = 1.5,
            label = "GR fit"
        )
    ]
    p_x = np.linspace(min_x, max_x, 100)
    p_y = [ p(_) for _ in p_x ]
    #~ generate 100 points between min and max of x and plot the fit
    ax_up.plot(
        p_x,
        p_y,
        label = None,
        linewidth = 2.5,
        linestyle = "-",
        markersize = 0,
        markevery = 0,
        marker = None,
        color = GR_color_fit,
        zorder = 100
    )
    #~ shaded band: average largest residual
    ax_up.fill_between(
        p_x,
        np.array(p_y)*(1 + avg_L_inf),
        np.array(p_y)*(1 - avg_L_inf),
        facecolor=GR_color_markers,
        alpha= plot_alpha - 0.2
    )
    #~ shaded band: worst residual
    ax_up.fill_between(
        p_x,
        np.array(p_y)*( 1 + L_inf_worst ),
        np.array(p_y)*( 1 - L_inf_worst ),
        facecolor=GR_color_markers,
        alpha= plot_alpha - 0.4
    )
    #~ now do the same for every (color value, linestyle value) pair
    for c, l in itertools.product(colors.items(), linestyles.items()):
        #~ both dicts carry a "label" key naming the parameter they
        #~ encode; skip those entries
        if c[0] == "label" or l[0] == "label":
            continue
        xp = []
        yp = []
        for data, eos in zip(all_data, severalEOSs):
            #~ collect the data of the EOSs matching both parameter values
            if eos[colors["label"]] == c[0] and eos[linestyles["label"]] == l[0]:
                xp.append( data[0] )
                yp.append( data[1] )
        #~ colors and linestyles have all possible combinations of EOSs
        #~ but we may not need all of them, only the portion determined
        #~ by severalEOSs; so, quick fix: if no points were added, skip
        if not xp:
            continue
        #~ expand all the data into flat lists to calculate the polyfit
        coef, chi_red, p = _get_polyfit_res(xp, yp)
        max_y_down = 0
        min_y_down = 1e9
        #~ average over all EOSs of all the residuals
        delta_all = 0
        n_all = 0
        #~ average over all EOSs of largest residual
        delta_all_max = 0
        n_all_max = 0
        #~ the largest residual across all EOSs
        delta_max = 0
        for data, eos in zip(all_data, severalEOSs):
            #~ same residual bookkeeping as the GR pass above
            if eos[colors["label"]] == c[0] and eos[linestyles["label"]] == l[0]:
                _data = [
                    abs(1 - _/p(__)) for _,__ in zip(data[1], data[0])
                ]
                delta_all += sum(_data)
                n_all += len(_data)
                delta_all_max += max(_data)
                n_all_max += 1
                delta_max = _get_max(_data, delta_max)
                max_y_down = _get_max(_data, max_y_down)
                min_y_down = _get_min(_data, min_y_down)
                ax_down.plot(
                    data[0],
                    _data,
                    label = None,
                    linewidth = 0,
                    markersize = 5.5,
                    markevery = self._get_markevry(data[0], _data),
                    marker = markers.get(eos["name"], None),
                    color = c[1],
                    markerfacecolor = c[1],
                    markeredgecolor = c[1],
                    alpha = plot_alpha - 0.1
                )
        avg_L_1 = delta_all/n_all
        avg_L_inf = delta_all_max/n_all_max
        L_inf_worst = delta_max
        print(
            "\n lambda = {:.3e}, m = {:.3e} fit"
            "\n\t $\chi_r^2$ = {:.3e}"
            "\n\t $a_1$ = {:.3e}"
            "\n\t $a_2$ = {:.3e}"
            "\n\t $a_3$ = {:.3e}"
            "\n\t $a_4$ = {:.3e}"
            "\n\t $< L_1 >$ = {:.3e}"
            "\n\t $< L_\inf >$ = {:.3e}"
            "\n\t $ L_\inf $ = {:.3e}\n".format(
                l[0] if linestyles["label"] == "lambda" else c[0],
                c[0] if colors["label"] == "m" else l[0],
                chi_red,
                coef[1],
                coef[2],
                coef[3],
                coef[4],
                avg_L_1,
                avg_L_inf,
                L_inf_worst
            )
        )
        #~ lines_polyfit.append(
            #~ Line2D(
                #~ [0], [0],
                #~ color = c[1],
                #~ marker = None,
                #~ linestyle = l[1],
                #~ linewidth = 1.5,
                #~ label = "$\\lambda$ = {:.3e},\n m = {:.3e} fit".format(
                    #~ l[0] if linestyles["label"] == "lambda" else c[0],
                    #~ c[0] if colors["label"] == "m" else l[0]
                #~ )
            #~ )
        #~ )
        p_x = np.linspace(min_x, max_x, 100)
        p_y = [ p(_) for _ in p_x ]
        #~ the pair's fit line: lightened color, the pair's linestyle
        ax_up.plot(
            p_x,
            p_y,
            label = None,
            linewidth = 2.5,
            linestyle = l[1],
            markersize = 0,
            markevery = 0,
            marker = None,
            color = self._luminosity_color(c[1], 1.1),
            zorder = 90
        )
        #~ ax_up.fill_between(
            #~ p_x,
            #~ np.array(p_y)*(1 + avg_L_inf),
            #~ np.array(p_y)*(1 - avg_L_inf),
            #~ facecolor=c[1],
            #~ alpha= plot_alpha - 0.25
        #~ )
        #~ ax_up.fill_between(
            #~ p_x,
            #~ np.array(p_y)*( 1 + L_inf_worst ),
            #~ np.array(p_y)*( 1 - L_inf_worst ),
            #~ facecolor=c[1],
            #~ alpha= plot_alpha - 0.5
        #~ )
    lines_markers, lines_colors, lines_linestyles = self._get_lines_MSs_Cs_LSs(
        markers, colors, linestyles, severalEOSs
    )
    #~ two separate legends: EOS markers on top-center, styles on top-right;
    #~ add_artist keeps the first legend alive when the second is created
    ax_up.add_artist( ax_up.legend(
        handles = [
            *lines_markers
        ],
        loc="upper center",
        fontsize=10,
        handlelength=3,
        numpoints=1,
        fancybox=True,
        markerscale = 1.5,
        ncol = 3,
        frameon = False,
        mode = None
    ) )
    ax_up.add_artist( ax_up.legend(
        handles = [
            *lines_colors, *lines_polyfit, *lines_linestyles
        ],
        loc="upper right",
        fontsize=10,
        handlelength=3,
        numpoints=1,
        fancybox=True,
        markerscale = 1.5,
        ncol = 1,
        frameon = False,
        mode = None
    ) )
    ax_up.set_xlim(min_x, max_x)
    ax_up.set_ylim(min_y, max_y)
    ax_down.set_ylim(1e-3, 1.5e0)
    plt.savefig(
        'uniBar.eps', format="eps",
        bbox_inches='tight',
        dpi=1200,
        pad_inches=0
    )
    plt.show()
    return
def foo(self,severalEOSs):
    """Convenience shortcut: draw the M-vs-R and phiScal_c-vs-p_c plots for <severalEOSs>."""
    self.plot_severalEOSs_MvsR(severalEOSs)
    self.plot_severalEOSs_phiScal_cVSp_c(severalEOSs)
    return
def foo2(self,severalEOSs):
    """Convenience shortcut: draw the latest-result plot for <severalEOSs>."""
    self.plot_latest_resEOSname_severalEOSs(severalEOSs)
    return
def foo_uniI(self,severalEOSs):
    """Convenience shortcut: draw both universal-I polyfit plots (tilde I and bar I)."""
    self.plot_severalEOSs_uniTildeI_polyFit(severalEOSs)
    self.plot_severalEOSs_uniBarI_polyFit(severalEOSs)
    return
@staticmethod
def _get_figure(nrows, ncols, grid_placement, height_ratios=None):
    """
    Create a figure carrying an <nrows> x <ncols> GridSpec whose
    subplots are laid out by <grid_placement>.

    Parameters
    ----------
    nrows, ncols : int
        grid geometry handed to GridSpec
    grid_placement : callable
        receives (fig, gs) and returns the list of created axes
    height_ratios : list, optional
        relative row heights forwarded to GridSpec

    Returns
    -------
    fig : Figure
        the figure itself
    : list
        the axes produced by <grid_placement>
    """
    # LaTeX text rendering with a serif font, set globally via rc
    plt.rc('text', usetex=True)
    plt.rc('font', family='serif')
    grid = GridSpec(nrows=nrows, ncols=ncols, height_ratios=height_ratios)
    figure = plt.figure(figsize=(9.60, 7.20))
    figure.set_tight_layout(False)
    figure.set_rasterized(True)
    return figure, grid_placement(figure, grid)
@staticmethod
def _4by4_grid_placement(fig, gs):
    """
    Fill a 2x2 grid row-major and return the four axes:
        ax[0] | ax[1]
        ------+------
        ax[2] | ax[3]
    """
    return [
        fig.add_subplot(gs[row, col])
        for row in (0, 1)
        for col in (0, 1)
    ]
@staticmethod
def _1by1_grid_placement(fig, gs):
    """Return a single axes spanning the entire grid."""
    whole_grid = gs[:]
    return [fig.add_subplot(whole_grid)]
@staticmethod
def _3by1_shareX_grid_placement(fig, gs):
    """
    Two vertically stacked panels intended to share the abscissa
    (callers pass height_ratios=[2, 1] for a 2:1 split):
        [ upper / main panel     ]   <- returned first
        [ lower / residual panel ]   <- returned second
    """
    panels = [
        fig.add_subplot(gs[0:1, 0]),  # upper panel
        fig.add_subplot(gs[1, 0]),    # lower panel
    ]
    return panels
@staticmethod
def _set_parms(
    ax, label_x, label_y, fontsize=14, x_ticksize = 14, y_ticksize = 14,
    x_format_str = "", y_format_str = ""
):
    """
    Configure the axes <ax>: inward ticks, optional axis labels
    (<label_x>/<label_y> with <fontsize>), per-axis tick label sizes
    and printf-style major-formatter strings. An empty/zero argument
    leaves the corresponding aspect untouched.
    """
    from matplotlib.ticker import FormatStrFormatter
    ax.tick_params(direction="in")
    # apply the same trio of optional settings to each axis in turn
    per_axis = (
        (ax.set_xlabel, ax.xaxis, label_x, x_ticksize, x_format_str),
        (ax.set_ylabel, ax.yaxis, label_y, y_ticksize, y_format_str),
    )
    for set_label, axis, text, tick_size, fmt in per_axis:
        if text:
            set_label(text, fontsize=fontsize)
        if tick_size:
            axis.set_tick_params(labelsize=tick_size)
        if fmt:
            axis.set_major_formatter(FormatStrFormatter(fmt))
    return
@staticmethod
def _get_parameter_values(label):
val_eosName = ""
val_beta = 0
val_m = 0
val_lambda = 0
for _ in label.split("_"):
if "beta" in _:
val_beta = float(_.replace("beta", ""))
elif "lambda" in _:
val_lambda = float(_.replace("lambda", ""))
elif "m" in _:
val_m = float(_.replace("m", ""))
elif "STT" not in _ and "phiScal" not in _ and "J" not in _:
val_eosName = _
return val_eosName, val_beta, val_m, val_lambda
@staticmethod
def _units_coef_clac():
"""
Calculates the unit coefficients
Parameters
----------
Returns
-------
: dictionary
"density" in (double) g cm^-3
"pressure" in (double) dyns cm^-3
"r" in (double) km
"j" in (double) m^2 kg
"""
# mas of sun in kg
const_msun = 1.9891e30
# gravitational const in m^3kg^-1s^-2
const_g = 6.67384e-11
# speed of light in ms^-1
const_c = 299792458
units = {}
# units of density in g cm^-3
units["density"] = 1e-3 * const_c**6 / (const_g**3 * const_msun**2)
# units of pressure in dyns cm^-3
units["pressure"] = const_c**8 / (const_g**3 * const_msun**2) * 10
# units of rad coordinate in km
units["R"] = 1e-3 * const_g * const_msun / const_c**2
# units of moment of inertia
units["J"] = 1e7 * const_g**2 * const_msun**3 / const_c**4
return units
@staticmethod
def _get_ls_lc_ms_mc():
    """
    Return one randomly chosen styling tuple:
        ls ---> line style
        lc ---> line color
        ms ---> marker style
        mc ---> marker color
    """
    # pools the combination is drawn from
    markerstyles = [
        "s", ">", "<", "^", "v", "o", "X", "P", "d", "D", "H", "*", "p",
    ]
    linestyles = [
        ":", "-.", "--", "-"
    ]
    colors = [
        "b", "g", "r", "c", "y", "k"
    ]
    # one sample from the full cartesian product of the pools;
    # NOTE(review): sampling through an unordered set makes the draw
    # nondeterministic across runs even under a fixed random seed
    return random.sample(
        set(
            itertools.product(linestyles,colors,markerstyles, colors)
        ), 1
    )[0]
@staticmethod
def _get_specific_ms(map_me, label):
"""
for provided list return list of dictionaries, each having the
entry of the provided list as key and marker style as value
it is here because to be easier to find
"""
all_makrers_styles = [
"s", "8", ">", "<", "^", "v", "o",
"X", "P", "d", "D", "H", "h", "*", "p",
"$\\bowtie$", "$\\clubsuit$", "$\\diamondsuit$", "$\\heartsuit$",
"$\\spadesuit$",
"$\\bigotimes$", "$\\bigoplus$",
]
if len(map_me) > len(all_makrers_styles):
print(
"\n not enough markers, amount of markers are {}\n".format(
len(all_makrers_styles)
)
)
return
#~ shuffle the list just for fun
for _ in range(5):
random.shuffle(all_makrers_styles)
res = {
_: __ for _, __ in zip(map_me, all_makrers_styles)
}
res.update({"label": label})
return res
def _get_specific_ls(self, map_me, label):
"""
for provided list return list of dictionaries, each having the
entry of the provided list as key and line style as value
it is here because to be easier to find
"""
if self.specific_ls:
return self.specific_ls
#~ all_line_styles = [
#~ ":", "-.", "--",
#~ (0, (5, 1, 1, 1, 1, 1)),
#~ (0, (5, 1, 1, 1, 1, 1, 1, 1)),
#~ (0, (5, 1, 1, 1, 1, 1, 1, 1, 1, 1)),
#~ (0, (8, 1, 1, 1, 3, 1, 1, 1))
#~ ]
all_line_styles = [
"-.", "--",
#~ (0, (5, 1, 1, 1, 1, 1)),
(0, (5, 1, 1, 1, 1, 1, 1, 1)),
#~ (0, (5, 1, 1, 1, 1, 1, 1, 1, 1, 1)),
(0, (8, 1, 1, 1, 3, 1, 1, 1))
]
if len(map_me) > len(all_line_styles):
print(
"\n not enough markers, amount of lines are {}\n".format(
len(all_line_styles)
)
)
return
#~ shuffle the list just for fun
for _ in range(5):
random.shuffle(all_line_styles)
res = {
_: __ for _, __ in zip(map_me, all_line_styles)
}
res.update({"label": label})
return res
def _get_specific_c(self, map_me, label):
"""
for provided list return list of dictionaries, each having the
entry of the provided list as key and colour as value
it is here because to be easier to find
"""
if self.specific_c:
return self.specific_c
#~ all_colors = [
#~ "b", "g", "r", "c", "m", "y"
#~ ]
#~ all_colors = [
#~ "#e6194B", "#3cb44b", "#4363d8"
#~ ]
#~ all_colors = [
#~ "#800000", "#4363d8", "#f58231"
#~ ]
all_colors = [
"#e6194B", "#3cb44b", "#4363d8"
]
if len(map_me) > len(all_colors):
print(
"\n not enough markers, amount of lines are {}\n".format(
len(all_line_styles)
)
)
return
#~ shuffle the list just for fun
for _ in range(5):
random.shuffle(all_colors)
res = {
_: __ for _, __ in zip(map_me, all_colors)
}
res.update({"label": label})
return res
@staticmethod
def _get_markevry(data_x, data_y, amount_points = 5):
"""
for provided data list get the difference between the max and min to
evluate the stem for getting amount_points and return list of
indexes of points closes to the step
solution used from
https://stackoverflow.com/questions/9873626/choose-m-evenly-spaced-elements-from-a-sequence-of-length-n
"""
def _get_EvenlySpacedIdxs(data, amount_points):
evenly_spaced, step = np.linspace(
min(data, key=abs), max(data, key=abs),
num = amount_points,
endpoint = True,
retstep = True
)
step = abs(step)
evenly_spaced_idx = [ 0 ]
cumitv_step = 0
for _, __ in enumerate(np.diff(data)):
cumitv_step += abs(__)
if cumitv_step > step:
evenly_spaced_idx.append(_)
cumitv_step = 0
if abs(data[-1] - data[evenly_spaced_idx[-1]]) > step:
evenly_spaced_idx.append(len(data)-1)
return evenly_spaced_idx
#~ Valid for the above function
#~ idx_x = _get_EvenlySpacedIdxs(data_x, amount_points)
#~ idx_y = _get_EvenlySpacedIdxs(data_y, amount_points)
#~ return list( set(idx_x).union(idx_y) )
def _get_CummalativeEvenlySpacedIdxs(data_x, data_y, amount_points):
step_x = abs(max(data_x, key=abs) - min(data_x, key=abs))/amount_points
step_y = abs(max(data_y, key=abs) - min(data_y, key=abs))/amount_points
idx = [ 0 ]
cumitv_step_x = 0
cumitv_step_y = 0
for i, (v_x, v_y) in enumerate(zip(np.diff(data_x), np.diff(data_y))):
cumitv_step_x += abs(v_x)
cumitv_step_y += abs(v_y)
if cumitv_step_x > step_x or cumitv_step_y > step_y:
idx.append(i)
cumitv_step_x = 0
cumitv_step_y = 0
return idx
return _get_CummalativeEvenlySpacedIdxs(data_x, data_y, amount_points/2)
@staticmethod
def _get_plot_keywords( markers, colors, linestyles, current ):
"""
for provided dictionaries for markers, colors and linestyles
for provided list <current> which contains the current eos name,
m value and lambda value
search where the eos name, m and lambda value are and subscribe its value
the label is returned last
"""
marker = None
color = None
linestyle = None
for _ in current.keys():
if _ == markers["label"]:
marker = markers.get(current[_])
elif _ == colors["label"]:
color = colors.get(current[_])
elif _ == linestyles["label"]:
linestyle = linestyles.get(current[_])
return {
"color": color,
"linestyle": linestyle,
"marker": marker,
"markerfacecolor": color,
"markeredgecolor": color
}
def _get_MSs_Cs_LSs(self,_severalEOSs):
tmp_ms = None
if self.specific_ms:
tmp_ms = self.specific_ms
else:
print("\n did you forgot to set_severalEOSs_ms_ls_c ?? \n")
tmp_ms = self._get_specific_ms(
list( set( [ _["name"] for _ in _severalEOSs ] ) ), "name"
)
tmp_c = None
if self.specific_c:
tmp_c = self.specific_c
else:
print("\n did you forgot to set_severalEOSs_ms_ls_c ?? \n")
tmp_c = self._get_specific_c(
list( set( [ _["m"] for _ in _severalEOSs ] ) ), "m"
)
tmp_ls = None
if self.specific_ls:
tmp_ls = self.specific_ls
else:
print("\n did you forgot to set_severalEOSs_ms_ls_c ?? \n")
tmp_ls = self._get_specific_ls(
list( set( [ _["lambda"] for _ in _severalEOSs ] ) ), "lambda"
)
return tmp_ms, tmp_c, tmp_ls
@staticmethod
def _get_lines_MSs_Cs_LSs(markers, colors, linestyles, severalEOSs = None ):
    """
    Build three lists of proxy Line2D handles — one per marker (EOS
    name), one per colour and one per linestyle — for use as legend
    entries. When <severalEOSs> is given, only the styles actually
    used by those EOSs are included; otherwise every non-"label" entry
    of the style dictionaries becomes a handle.
    """
    def _convert_sci(num):
        # render <num> as "$m \times 10^{e}$" for the legend label,
        # or just "$m$" when the mantissa rounds to zero
        snum = "{:.2e}".format(num)
        mantisa, power = snum.split("e")
        mantisa = "{}".format(mantisa)
        power = "{}".format(power)
        return " ${{{:.0f}}} \\times 10^{{{:.0f}}}$".format(
            float(mantisa), float(power)
        ) if float(mantisa) \
        else " ${{{:.0f}}}$".format(
            float(mantisa)
        )
    # prefix chosen by which physical parameter the style dict encodes
    chs_lambda_m = lambda _: "$\\lambda =$ " if _ == "lambda" else "m = "
    if not severalEOSs:
        # one handle per entry of each style dictionary
        lines_markers = [
            Line2D(
                [0], [0],
                color="k",
                marker = __,
                linewidth = 0,
                label = _
            )
            for _, __ in markers.items() if _ != "label"
        ]
        lines_colors = [
            Line2D(
                [0], [0],
                color=__,
                marker = None,
                linewidth = 1.5,
                linestyle = "-",
                label = chs_lambda_m(colors["label"]) + _convert_sci(_)
            )
            for _, __ in colors.items() if _ != "label"
        ]
        lines_linestyles = [
            Line2D(
                [0], [0],
                color="k",
                marker = None,
                linewidth = 1.5,
                linestyle = __,
                label = chs_lambda_m(linestyles["label"]) + _convert_sci(_)
            )
            for _, __ in linestyles.items() if _ != "label"
        ]
    else:
        # restrict the handles to the values occurring in <severalEOSs>
        lines_markers = [
            Line2D(
                [0], [0],
                color="k",
                marker = markers[_],
                linewidth = 0,
                label = _
            )
            for _ in list( set( [ _["name"] for _ in severalEOSs ] ) )
        ]
        lines_colors = [
            Line2D(
                [0], [0],
                color=colors[_],
                marker = None,
                linewidth = 1.5,
                linestyle = "-",
                label = chs_lambda_m(colors["label"]) + _convert_sci(_)
            )
            for _ in list( set( [ _[colors["label"]] for _ in severalEOSs ] ) )
        ]
        lines_linestyles = [
            Line2D(
                [0], [0],
                color="k",
                marker = None,
                linewidth = 1.5,
                linestyle = linestyles[_],
                label = chs_lambda_m(linestyles["label"]) + _convert_sci(_)
            )
            for _ in list( set( [ _[linestyles["label"]] for _ in severalEOSs ] ) )
        ]
    return lines_markers, lines_colors, lines_linestyles
# manual smoke test: the module only prints a placeholder when executed directly
if __name__ == "__main__":
    print("\n asd \n")
| 30.525726
| 111
| 0.4487
| 17,163
| 152,476
| 3.730583
| 0.041601
| 0.01187
| 0.008059
| 0.009402
| 0.860077
| 0.846989
| 0.835447
| 0.827982
| 0.815971
| 0.811379
| 0
| 0.030908
| 0.434088
| 152,476
| 4,994
| 112
| 30.531838
| 0.711119
| 0.177923
| 0
| 0.765242
| 0
| 0.000943
| 0.053639
| 0.000833
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019485
| false
| 0
| 0.005028
| 0.000629
| 0.046512
| 0.008171
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
49dfc40877fb21a42f7968dd487204acbb3be7e9
| 914
|
py
|
Python
|
projetoWebPet/crud/models.py
|
annaladewig/exercicios-back-end-unit
|
c83e633bc58ccf7ec4cd6f3d96d66019bf4494d6
|
[
"MIT"
] | null | null | null |
projetoWebPet/crud/models.py
|
annaladewig/exercicios-back-end-unit
|
c83e633bc58ccf7ec4cd6f3d96d66019bf4494d6
|
[
"MIT"
] | null | null | null |
projetoWebPet/crud/models.py
|
annaladewig/exercicios-back-end-unit
|
c83e633bc58ccf7ec4cd6f3d96d66019bf4494d6
|
[
"MIT"
] | null | null | null |
from django.db import models
# Create your models here.
class Animals(models.Model):
    """Registry entry for a pet: identification, physical data and owner contact.

    NOTE(review): peso (weight) and dataNascimento (birth date) are stored as
    CharField, not Decimal/Date fields — changing them would require a schema
    migration, so they are kept as-is.
    """

    # Pet identification.
    nome = models.CharField(max_length=255, null=False, blank=False)
    raca = models.CharField(max_length=255, null=False, blank=False)
    especie = models.CharField(max_length=255, null=False, blank=False)

    # Physical data (free-text by the original design).
    peso = models.CharField(max_length=14, null=False, blank=False)
    dataNascimento = models.CharField(max_length=14, null=False, blank=False)

    # Owner contact details.
    donoPet = models.CharField(max_length=255, null=False, blank=False)
    telefone = models.CharField(max_length=14, null=False, blank=False)
    endereco = models.CharField(max_length=255, null=False, blank=False)
| 16.618182
| 38
| 0.54267
| 93
| 914
| 5.247312
| 0.290323
| 0.245902
| 0.295082
| 0.393443
| 0.747951
| 0.747951
| 0.747951
| 0.747951
| 0.747951
| 0
| 0
| 0.036522
| 0.370897
| 914
| 54
| 39
| 16.925926
| 0.812174
| 0.026258
| 0
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.02381
| 0
| 0.238095
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b70755cca76e21e0a7624fd2db26d96fa6881ba5
| 184
|
py
|
Python
|
node_editor/__init__.py
|
lcopey/node_editor
|
04d56ae4c7f2149e46903d5dd2e46f3906ef69e6
|
[
"MIT"
] | 1
|
2021-04-30T11:28:42.000Z
|
2021-04-30T11:28:42.000Z
|
node_editor/__init__.py
|
lcopey/node_editor
|
04d56ae4c7f2149e46903d5dd2e46f3906ef69e6
|
[
"MIT"
] | null | null | null |
node_editor/__init__.py
|
lcopey/node_editor
|
04d56ae4c7f2149e46903d5dd2e46f3906ef69e6
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Node editor implementation.
Contains multiple classes"""
from .node_editor_window import NodeEditorWindow
from .node_editor_widget import NodeEditorWidget
| 23
| 48
| 0.788043
| 21
| 184
| 6.714286
| 0.714286
| 0.212766
| 0.198582
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006135
| 0.11413
| 184
| 7
| 49
| 26.285714
| 0.858896
| 0.423913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b7246d59d67bf0950312dcac8d648bbb539e0f1f
| 76
|
py
|
Python
|
asyncnostic/__init__.py
|
DerekYu177/asyncnostic
|
db1fcf1b0a69e70cdc1ab8465af3ff286af7e57a
|
[
"MIT"
] | null | null | null |
asyncnostic/__init__.py
|
DerekYu177/asyncnostic
|
db1fcf1b0a69e70cdc1ab8465af3ff286af7e57a
|
[
"MIT"
] | 3
|
2019-07-26T03:11:45.000Z
|
2020-01-19T20:46:22.000Z
|
asyncnostic/__init__.py
|
DerekYu177/asyncnostic
|
db1fcf1b0a69e70cdc1ab8465af3ff286af7e57a
|
[
"MIT"
] | null | null | null |
from .legacy import asyncnostic
from .legacy import v1
from .main import v2
| 19
| 31
| 0.802632
| 12
| 76
| 5.083333
| 0.583333
| 0.327869
| 0.52459
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.03125
| 0.157895
| 76
| 3
| 32
| 25.333333
| 0.921875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3f915d388689cec93210d23a46011237f3f12ecb
| 4,241
|
py
|
Python
|
test/test_storage_project_disk_api.py
|
hyperonecom/h1-client-python
|
4ce355852ba3120ec1b8f509ab5894a5c08da730
|
[
"MIT"
] | null | null | null |
test/test_storage_project_disk_api.py
|
hyperonecom/h1-client-python
|
4ce355852ba3120ec1b8f509ab5894a5c08da730
|
[
"MIT"
] | null | null | null |
test/test_storage_project_disk_api.py
|
hyperonecom/h1-client-python
|
4ce355852ba3120ec1b8f509ab5894a5c08da730
|
[
"MIT"
] | null | null | null |
"""
HyperOne
HyperOne API # noqa: E501
The version of the OpenAPI document: 0.1.0
Generated by: https://openapi-generator.tech
"""
import unittest
import h1
from h1.api.storage_project_disk_api import StorageProjectDiskApi # noqa: E501
class TestStorageProjectDiskApi(unittest.TestCase):
    """Generated unit-test stubs for the StorageProjectDiskApi client.

    Every test body is a placeholder (``pass``); fill them in with real
    request/response assertions as the endpoints are exercised.
    """

    def setUp(self):
        # A fresh API client instance for every test case.
        self.api = StorageProjectDiskApi()  # noqa: E501

    def tearDown(self):
        pass

    def test_storage_project_disk_create(self):
        """Stub for storage_project_disk_create (Create storage/disk)."""
        pass

    def test_storage_project_disk_delete(self):
        """Stub for storage_project_disk_delete (Delete storage/disk)."""
        pass

    def test_storage_project_disk_detach(self):
        """Stub for storage_project_disk_detach (Detach storage/disk)."""
        pass

    def test_storage_project_disk_download(self):
        """Stub for storage_project_disk_download (Download storage/disk)."""
        pass

    def test_storage_project_disk_event_get(self):
        """Stub for storage_project_disk_event_get (Get storage/disk.event)."""
        pass

    def test_storage_project_disk_event_list(self):
        """Stub for storage_project_disk_event_list (List storage/disk.event)."""
        pass

    def test_storage_project_disk_get(self):
        """Stub for storage_project_disk_get (Get storage/disk)."""
        pass

    def test_storage_project_disk_list(self):
        """Stub for storage_project_disk_list (List storage/disk)."""
        pass

    def test_storage_project_disk_metric_get(self):
        """Stub for storage_project_disk_metric_get (Get storage/disk.metric)."""
        pass

    def test_storage_project_disk_metric_list(self):
        """Stub for storage_project_disk_metric_list (List storage/disk.metric)."""
        pass

    def test_storage_project_disk_metric_point_list(self):
        """Stub for storage_project_disk_metric_point_list (List storage/disk.point)."""
        pass

    def test_storage_project_disk_resize(self):
        """Stub for storage_project_disk_resize (Resize storage/disk)."""
        pass

    def test_storage_project_disk_service_get(self):
        """Stub for storage_project_disk_service_get (Get storage/disk.service)."""
        pass

    def test_storage_project_disk_service_list(self):
        """Stub for storage_project_disk_service_list (List storage/disk.service)."""
        pass

    def test_storage_project_disk_tag_create(self):
        """Stub for storage_project_disk_tag_create (Create storage/disk.tag)."""
        pass

    def test_storage_project_disk_tag_delete(self):
        """Stub for storage_project_disk_tag_delete (Delete storage/disk.tag)."""
        pass

    def test_storage_project_disk_tag_get(self):
        """Stub for storage_project_disk_tag_get (Get storage/disk.tag)."""
        pass

    def test_storage_project_disk_tag_list(self):
        """Stub for storage_project_disk_tag_list (List storage/disk.tag)."""
        pass

    def test_storage_project_disk_tag_put(self):
        """Stub for storage_project_disk_tag_put (Replace storage/disk.tag)."""
        pass

    def test_storage_project_disk_transfer(self):
        """Stub for storage_project_disk_transfer (Transfer storage/disk)."""
        pass

    def test_storage_project_disk_update(self):
        """Stub for storage_project_disk_update (Update storage/disk)."""
        pass
# Allow running this generated test module directly (python <file>.py).
if __name__ == '__main__':
    unittest.main()
| 24.096591
| 79
| 0.641594
| 518
| 4,241
| 4.897683
| 0.108108
| 0.237288
| 0.305085
| 0.148995
| 0.781238
| 0.75404
| 0.737091
| 0.672054
| 0.435948
| 0.389042
| 0
| 0.025362
| 0.284131
| 4,241
| 175
| 80
| 24.234286
| 0.810277
| 0.447536
| 0
| 0.423077
| 1
| 0
| 0.004322
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.442308
| false
| 0.423077
| 0.057692
| 0
| 0.519231
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 8
|
3fb8d38494cc02aa3c3ffb1cecee264d03d79efb
| 192
|
py
|
Python
|
zipf/factories/zipf_from_list/zipf_from_list.py
|
LucaCappelletti94/zipf
|
956c3a1d56958384a02d5bb4671c6883cd9a25e3
|
[
"MIT"
] | 3
|
2018-11-07T01:56:09.000Z
|
2020-05-31T12:24:09.000Z
|
zipf/factories/zipf_from_list/zipf_from_list.py
|
LucaCappelletti94/zipf
|
956c3a1d56958384a02d5bb4671c6883cd9a25e3
|
[
"MIT"
] | 1
|
2018-05-15T15:58:06.000Z
|
2018-05-15T15:58:06.000Z
|
zipf/factories/zipf_from_list/zipf_from_list.py
|
LucaCappelletti94/zipf
|
956c3a1d56958384a02d5bb4671c6883cd9a25e3
|
[
"MIT"
] | null | null | null |
"""ZipfFromList create a Zipf from a given list."""
from ..zipf_factory import ZipfFactory
class ZipfFromList(ZipfFactory):
    """Factory that creates a Zipf distribution from a given list.

    All behaviour is inherited from ZipfFactory; this subclass only
    provides a distinct type for list-based creation.
    """

    pass
| 21.333333
| 55
| 0.723958
| 25
| 192
| 5.52
| 0.48
| 0.26087
| 0.275362
| 0.333333
| 0.536232
| 0.536232
| 0.536232
| 0.536232
| 0
| 0
| 0
| 0
| 0.177083
| 192
| 8
| 56
| 24
| 0.873418
| 0.473958
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 9
|
3fbb52ff38f45e99e4c9dd429f0c13f81a36f347
| 184
|
py
|
Python
|
inmemory/redisutils.py
|
opennlp/DeepPhrase
|
54bd6ca96c12475e3c3ff3745a4eb7c245b6e870
|
[
"MIT"
] | 2
|
2019-06-19T12:52:31.000Z
|
2020-05-20T15:29:56.000Z
|
inmemory/redisutils.py
|
opennlp/DeepPhrase
|
54bd6ca96c12475e3c3ff3745a4eb7c245b6e870
|
[
"MIT"
] | 5
|
2019-12-17T05:44:10.000Z
|
2022-02-10T00:29:31.000Z
|
inmemory/redisutils.py
|
opennlp/DeepPhrase
|
54bd6ca96c12475e3c3ff3745a4eb7c245b6e870
|
[
"MIT"
] | 3
|
2019-10-06T13:31:31.000Z
|
2022-03-16T16:13:09.000Z
|
def store_value(redis_client, key_name, value):
    """Store *value* under *key_name* only if the key does not exist yet.

    Uses SETNX ("set if not exists"), so an existing key is never
    overwritten.

    Args:
        redis_client: connected Redis client (any object exposing setnx()).
        key_name: key to create.
        value: value to store.

    Returns:
        bool: True if the value was stored, False if the key already
        existed. (The original code returned True unconditionally,
        hiding failed writes from the caller.)
    """
    return bool(redis_client.setnx(key_name, value))
def get_key_value(redis_client, key_name):
    """Fetch and return whatever the client reports for *key_name*."""
    stored = redis_client.get(key_name)
    return stored
| 20.444444
| 45
| 0.782609
| 30
| 184
| 4.433333
| 0.366667
| 0.330827
| 0.360902
| 0.285714
| 0.345865
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 184
| 8
| 46
| 23
| 0.83125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0.2
| 0.8
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
3fca28b52fcef7f7b3a5292aa1be4b29855ea673
| 236
|
py
|
Python
|
spacetraders/sync/errors.py
|
MrKomodoDragon/spacetraders.py
|
9e6a002157cf0bc250650768bea4c79d182a3527
|
[
"MIT"
] | 1
|
2021-05-31T15:43:52.000Z
|
2021-05-31T15:43:52.000Z
|
spacetraders/sync/errors.py
|
muunie/spacetraders.py
|
9e6a002157cf0bc250650768bea4c79d182a3527
|
[
"MIT"
] | null | null | null |
spacetraders/sync/errors.py
|
muunie/spacetraders.py
|
9e6a002157cf0bc250650768bea4c79d182a3527
|
[
"MIT"
] | null | null | null |
class HTTPException(Exception):
    """Raised when an HTTP request fails.

    Attributes:
        status: numeric HTTP status code of the failed response.
        reason: human-readable reason phrase for the failure.
    """

    def __init__(self, status: int, reason: str):
        # Forward the details to Exception so that e.args, repr() and
        # pickling carry the failure information (the original skipped
        # super().__init__, leaving args empty).
        super().__init__(status, reason)
        self.status = status
        self.reason = reason

    def __str__(self):
        return "HTTPException: {}: {}".format(self.status, self.reason)
| 29.5
| 71
| 0.639831
| 26
| 236
| 5.5
| 0.461538
| 0.20979
| 0.223776
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.228814
| 236
| 8
| 71
| 29.5
| 0.785714
| 0
| 0
| 0
| 0
| 0
| 0.088983
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.166667
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
b210109b6591c7bb61e523261a52d9ee1739058a
| 321,623
|
py
|
Python
|
contrib/monarch/datamodel.py
|
balhoff/biolink-model
|
6e04104b8a7c3718ca3d00c11eb48963206f4855
|
[
"CC0-1.0"
] | null | null | null |
contrib/monarch/datamodel.py
|
balhoff/biolink-model
|
6e04104b8a7c3718ca3d00c11eb48963206f4855
|
[
"CC0-1.0"
] | null | null | null |
contrib/monarch/datamodel.py
|
balhoff/biolink-model
|
6e04104b8a7c3718ca3d00c11eb48963206f4855
|
[
"CC0-1.0"
] | null | null | null |
## CLASSES
class NamedThing(object):
    """A databased entity or concept/class.

    NOTE(review): the code generator emitted this class twice back to back;
    the redundant duplicate definition (which simply rebound the name to an
    identical class) was removed.

    Attributes:
        id: identifier of the entity (None when not assigned).
        label: human-readable label.
    """

    def __init__(self, id=None, label=None):
        self.id = id
        self.label = label

    def __str__(self):
        return "id={} label={} ".format(self.id, self.label)

    def __repr__(self):
        return self.__str__()
class BiologicalEntity(NamedThing):
    """A biological entity (no description in the source model).

    NOTE(review): duplicate generated definition removed.
    """

    def __init__(self, id=None, label=None):
        self.id = id
        self.label = label

    def __str__(self):
        return "id={} label={} ".format(self.id, self.label)

    def __repr__(self):
        return self.__str__()
class OrganismalEntity(BiologicalEntity):
    """A named entity that is either a part of an organism, a whole organism,
    population or clade of organisms, excluding molecular entities.

    NOTE(review): duplicate generated definition removed.
    """

    def __init__(self, id=None, label=None):
        self.id = id
        self.label = label

    def __str__(self):
        return "id={} label={} ".format(self.id, self.label)

    def __repr__(self):
        return self.__str__()
class PopulationOfIndividualOrganisms(OrganismalEntity):
    """A population of individual organisms (no description in the source model).

    NOTE(review): duplicate generated definition removed.
    """

    def __init__(self, id=None, label=None, in_taxon=None):
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        return "id={} label={} in_taxon={} ".format(self.id, self.label, self.in_taxon)

    def __repr__(self):
        return self.__str__()
class Cohort(PopulationOfIndividualOrganisms):
    """A cohort (no description in the source model)."""

    def __init__(self, id=None, label=None, in_taxon=None):
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        return "id=%s label=%s in_taxon=%s " % (self.id, self.label, self.in_taxon)

    def __repr__(self):
        return str(self)
class ExtensionsAndEvidenceAssociationMixin(object):
    """An injected mixin that adds additional fields to association objects.

    A mixture of (a) closures for denormalization and (b) evidence fields
    specific to the monarch model.
    """

    # Attribute names, in the exact order they appear in __str__.
    _FIELDS = (
        "subject_extensions",
        "object_extensions",
        "has_evidence_graph",
        "has_evidence_type",
        "has_evidence",
        "subject_taxon",
        "subject_taxon_label",
        "subject_taxon_closure",
        "subject_taxon_closure_label",
        "object_taxon",
        "object_taxon_label",
        "object_taxon_closure",
        "object_taxon_closure_label",
    )

    def __init__(self, subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Bind every keyword argument to the attribute of the same name.
        supplied = locals()
        for name in self._FIELDS:
            setattr(self, name, supplied[name])

    def __str__(self):
        return "".join("{}={} ".format(name, getattr(self, name))
                       for name in self._FIELDS)

    def __repr__(self):
        return self.__str__()
class TaxonClosureMixin(object):
    """An association that includes flattened inlined objects, such as
    subject_taxon_closure.
    """

    # Attribute names, in the exact order they appear in __str__.
    _FIELDS = (
        "subject_taxon",
        "subject_taxon_label",
        "subject_taxon_closure",
        "subject_taxon_closure_label",
        "object_taxon",
        "object_taxon_label",
        "object_taxon_closure",
        "object_taxon_closure_label",
    )

    def __init__(self, subject_taxon=None, subject_taxon_label=None,
                 subject_taxon_closure=None, subject_taxon_closure_label=None,
                 object_taxon=None, object_taxon_label=None,
                 object_taxon_closure=None, object_taxon_closure_label=None):
        # Bind every keyword argument to the attribute of the same name.
        supplied = locals()
        for name in self._FIELDS:
            setattr(self, name, supplied[name])

    def __str__(self):
        return "".join("{}={} ".format(name, getattr(self, name))
                       for name in self._FIELDS)

    def __repr__(self):
        return self.__str__()
class RelationshipType(object):
    """An OWL property used as an edge label."""

    def __init__(self):
        # No attributes in the generated model.
        pass

    def __str__(self):
        return ""

    def __repr__(self):
        return str(self)
class Attribute(object):
    """A property or characteristic of an entity.

    NOTE(review): the generator emitted Attribute and BiologicalSex twice
    each; the redundant duplicate definitions were removed. The subclasses
    below inherit the (empty) __init__/__str__/__repr__ instead of
    restating identical copies.
    """

    def __init__(self):
        pass

    def __str__(self):
        return ""

    def __repr__(self):
        return self.__str__()


class BiologicalSex(Attribute):
    """Sex attribute of an individual (no description in the source model)."""


class PhenotypicSex(BiologicalSex):
    """An attribute corresponding to the phenotypic sex of the individual,
    based upon the reproductive organs present.
    """


class GenotypicSex(BiologicalSex):
    """An attribute corresponding to the genotypic sex of the individual,
    based upon genotypic composition of sex chromosomes.
    """


class SeverityValue(Attribute):
    """Describes the severity of a phenotypic feature or disease."""


class FrequencyValue(Attribute):
    """Describes the frequency of occurrence of an event or condition."""


class ClinicalModifier(Attribute):
    """Used to characterize and specify the phenotypic abnormalities defined
    in the Phenotypic abnormality subontology, with respect to severity,
    laterality, age of onset, and other aspects.
    """


class Onset(Attribute):
    """The age group in which manifestations appear."""
class OntologyClass(object):
    """A concept or class in an ontology, vocabulary or thesaurus.

    NOTE(review): duplicate generated definition removed.
    """

    def __init__(self):
        pass

    def __str__(self):
        return ""

    def __repr__(self):
        return self.__str__()


class GeneOntologyClass(OntologyClass):
    """An ontology class that describes a functional aspect of a gene, gene
    product or complex.
    """
class ThingWithTaxon(object):
    """A mixin that can be used on any entity with a taxon."""

    def __init__(self, in_taxon=None):
        self.in_taxon = in_taxon

    def __str__(self):
        return "in_taxon=%s " % (self.in_taxon,)

    def __repr__(self):
        return str(self)
class OrganismTaxon(OrganismalEntity):
    """An organism taxon (no description in the source model)."""

    def __init__(self, id=None, label=None):
        self.id = id
        self.label = label

    def __str__(self):
        return "id=%s label=%s " % (self.id, self.label)

    def __repr__(self):
        return str(self)
class IndividualOrganism(OrganismalEntity):
    """An individual organism (no description in the source model).

    NOTE(review): duplicate generated definition removed; Case inherits
    identical __init__/__str__/__repr__ rather than restating them.
    """

    def __init__(self, id=None, label=None, in_taxon=None):
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        return "id={} label={} in_taxon={} ".format(self.id, self.label, self.in_taxon)

    def __repr__(self):
        return self.__str__()


class Case(IndividualOrganism):
    """An individual organism that has a patient role in some clinical context."""
class Biosample(OrganismalEntity):
    """A biosample (no description in the source model)."""

    def __init__(self, id=None, label=None, in_taxon=None):
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        return "id=%s label=%s in_taxon=%s " % (self.id, self.label, self.in_taxon)

    def __repr__(self):
        return str(self)
class DiseaseOrPhenotypicFeature(BiologicalEntity):
    """Either one of a disease or an individual phenotypic feature. Some
    knowledge resources such as Monarch treat these as distinct, others such
    as MESH conflate.

    NOTE(review): duplicate generated definition removed; the subclasses
    below inherit identical __init__/__str__/__repr__ rather than
    restating them.
    """

    def __init__(self, id=None, label=None, in_taxon=None):
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        return "id={} label={} in_taxon={} ".format(self.id, self.label, self.in_taxon)

    def __repr__(self):
        return self.__str__()


class Disease(DiseaseOrPhenotypicFeature):
    """A disease (no description in the source model)."""


class PhenotypicFeature(DiseaseOrPhenotypicFeature):
    """A phenotypic feature (no description in the source model)."""
class Environment(BiologicalEntity):
    """A feature of the environment of an organism that influences one or more
    phenotypic features of that organism, potentially mediated by genes.
    """

    def __init__(self, id=None, label=None):
        self.id = id
        self.label = label

    def __str__(self):
        return "id=%s label=%s " % (self.id, self.label)

    def __repr__(self):
        return str(self)
class InformationContentEntity(NamedThing):
    """A piece of information that typically describes some piece of biology
    or is used as support.

    NOTE(review): duplicate generated definition removed; the subclasses
    below inherit identical __init__/__str__/__repr__ rather than
    restating them.
    """

    def __init__(self, id=None, label=None):
        self.id = id
        self.label = label

    def __str__(self):
        return "id={} label={} ".format(self.id, self.label)

    def __repr__(self):
        return self.__str__()


class ConfidenceLevel(InformationContentEntity):
    """Level of confidence in a statement."""


class EvidenceType(InformationContentEntity):
    """Class of evidence that supports an association."""


class Publication(InformationContentEntity):
    """Any published piece of information. Can refer to a whole publication,
    or to a part of it (e.g. a figure, figure legend, or section highlighted
    by NLP). The scope is intended to be general and include information
    published on the web as well as journals.
    """
class AdministrativeEntity(object):
    """An administrative entity (no description in the source model).

    NOTE(review): duplicate generated definition removed; Provider inherits
    the (empty) __init__/__str__/__repr__ rather than restating them.
    """

    def __init__(self):
        pass

    def __str__(self):
        return ""

    def __repr__(self):
        return self.__str__()


class Provider(AdministrativeEntity):
    """Person, group, organization or project that provides a piece of
    information.
    """
class MolecularEntity(BiologicalEntity):
    """A gene, gene product, small molecule or macromolecule (including
    protein complex).

    NOTE(review): duplicate generated definition removed; ChemicalSubstance
    inherits identical __init__/__str__/__repr__ rather than restating them.
    """

    def __init__(self, id=None, label=None, in_taxon=None):
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        return "id={} label={} in_taxon={} ".format(self.id, self.label, self.in_taxon)

    def __repr__(self):
        return self.__str__()


class ChemicalSubstance(MolecularEntity):
    """May be a chemical entity or a formulation with a chemical entity as
    active ingredient, or a complex material with multiple chemical entities
    as part.
    """
class AnatomicalEntity(OrganismalEntity):
    """A subcellular location, cell type or gross anatomical part."""

    def __init__(self, id=None, label=None, in_taxon=None):
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        return "id=%s label=%s in_taxon=%s " % (self.id, self.label, self.in_taxon)

    def __repr__(self):
        return str(self)
class LifeStage(OrganismalEntity):
    """A stage of development or growth of an organism, including post-natal
    adult stages.
    """

    def __init__(self, id=None, label=None, in_taxon=None):
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        return "id=%s label=%s in_taxon=%s " % (self.id, self.label, self.in_taxon)

    def __repr__(self):
        return str(self)
class PlanetaryEntity(NamedThing):
    """Any entity or process that exists at the level of the whole planet.

    NOTE(review): duplicate generated definition removed; the subclasses
    below inherit identical __init__/__str__/__repr__ rather than
    restating them.
    """

    def __init__(self, id=None, label=None):
        self.id = id
        self.label = label

    def __str__(self):
        return "id={} label={} ".format(self.id, self.label)

    def __repr__(self):
        return self.__str__()


class EnvironmentalProcess(PlanetaryEntity):
    """An environmental process (no description in the source model)."""


class EnvironmentalFeature(PlanetaryEntity):
    """An environmental feature (no description in the source model)."""
class ClinicalEntity(NamedThing):
    """Any entity or process that exists in the clinical domain and outside
    the biological realm. Diseases are placed under biological entities.

    NOTE(review): duplicate generated definition removed; the subclasses
    below inherit identical __init__/__str__/__repr__ rather than
    restating them.
    """

    def __init__(self, id=None, label=None):
        self.id = id
        self.label = label

    def __str__(self):
        return "id={} label={} ".format(self.id, self.label)

    def __repr__(self):
        return self.__str__()


class ClinicalTrial(ClinicalEntity):
    """A clinical trial (no description in the source model)."""


class ClinicalIntervention(ClinicalEntity):
    """A clinical intervention (no description in the source model)."""
class GenomicEntity(MolecularEntity):
"""
an entity that can either be directly located on a genome (gene, transcript, exon, regulatory region) or is encoded in a genome (protein)
"""
def __init__(self,
id=None,
label=None,
in_taxon=None):
self.id=id
self.label=label
self.in_taxon=in_taxon
def __str__(self):
return "id={} label={} in_taxon={} ".format(self.id,self.label,self.in_taxon)
def __repr__(self):
return self.__str__()
class GenomicEntity(MolecularEntity):
"""
an entity that can either be directly located on a genome (gene, transcript, exon, regulatory region) or is encoded in a genome (protein)
"""
def __init__(self,
id=None,
label=None,
in_taxon=None):
self.id=id
self.label=label
self.in_taxon=in_taxon
def __str__(self):
return "id={} label={} in_taxon={} ".format(self.id,self.label,self.in_taxon)
def __repr__(self):
return self.__str__()
class Genome(GenomicEntity):
"""
A genome is the sum of genetic material within a cell or virion.
"""
def __init__(self,
id=None,
label=None,
in_taxon=None):
self.id=id
self.label=label
self.in_taxon=in_taxon
def __str__(self):
return "id={} label={} in_taxon={} ".format(self.id,self.label,self.in_taxon)
def __repr__(self):
return self.__str__()
class Transcript(GenomicEntity):
"""
An RNA synthesized on a DNA or RNA template by an RNA polymerase
"""
def __init__(self,
id=None,
label=None,
in_taxon=None):
self.id=id
self.label=label
self.in_taxon=in_taxon
def __str__(self):
return "id={} label={} in_taxon={} ".format(self.id,self.label,self.in_taxon)
def __repr__(self):
return self.__str__()
class Exon(GenomicEntity):
    """A region of the transcript sequence within a gene which is not removed
    from the primary RNA transcript by RNA splicing."""

    def __init__(self, id=None, label=None, in_taxon=None):
        # Plain data holder: store each constructor argument as-is.
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        # Trailing space kept for byte-compatibility with the generated form.
        return f"id={self.id} label={self.label} in_taxon={self.in_taxon} "

    def __repr__(self):
        return str(self)
class CodingSequence(GenomicEntity):
    """A coding sequence (no description supplied by the source model)."""

    def __init__(self, id=None, label=None, in_taxon=None):
        # Plain data holder: store each constructor argument as-is.
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        # Trailing space kept for byte-compatibility with the generated form.
        return f"id={self.id} label={self.label} in_taxon={self.in_taxon} "

    def __repr__(self):
        return str(self)
class GeneOrGeneProduct(GenomicEntity):
    """A union of genes or gene products; an identifier for one is frequently
    used as a proxy for the other."""

    def __init__(self, id=None, label=None, in_taxon=None):
        # Plain data holder: store each constructor argument as-is.
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        # Trailing space kept for byte-compatibility with the generated form.
        return f"id={self.id} label={self.label} in_taxon={self.in_taxon} "

    def __repr__(self):
        return str(self)
# NOTE(review): verbatim duplicate definition emitted by the code generator;
# it re-binds the name and supersedes the earlier identical copy.
class GeneOrGeneProduct(GenomicEntity):
    """A union of genes or gene products; an identifier for one is frequently
    used as a proxy for the other."""

    def __init__(self, id=None, label=None, in_taxon=None):
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        # Trailing space kept for byte-compatibility with the generated form.
        return f"id={self.id} label={self.label} in_taxon={self.in_taxon} "

    def __repr__(self):
        return str(self)
class Gene(GeneOrGeneProduct):
    """A gene (no description supplied by the source model)."""

    def __init__(self, id=None, label=None, in_taxon=None):
        # Plain data holder: store each constructor argument as-is.
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        # Trailing space kept for byte-compatibility with the generated form.
        return f"id={self.id} label={self.label} in_taxon={self.in_taxon} "

    def __repr__(self):
        return str(self)
class GeneProduct(GeneOrGeneProduct):
    """The functional molecular product of a single gene: either a protein or
    a functional RNA molecule."""

    def __init__(self, id=None, label=None, in_taxon=None):
        # Plain data holder: store each constructor argument as-is.
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        # Trailing space kept for byte-compatibility with the generated form.
        return f"id={self.id} label={self.label} in_taxon={self.in_taxon} "

    def __repr__(self):
        return str(self)
# NOTE(review): verbatim duplicate definition emitted by the code generator;
# it re-binds the name and supersedes the earlier identical copy.
class GeneProduct(GeneOrGeneProduct):
    """The functional molecular product of a single gene: either a protein or
    a functional RNA molecule."""

    def __init__(self, id=None, label=None, in_taxon=None):
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        # Trailing space kept for byte-compatibility with the generated form.
        return f"id={self.id} label={self.label} in_taxon={self.in_taxon} "

    def __repr__(self):
        return str(self)
class Protein(GeneProduct):
    """A protein gene product (no description supplied by the source model)."""

    def __init__(self, id=None, label=None, in_taxon=None):
        # Plain data holder: store each constructor argument as-is.
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        # Trailing space kept for byte-compatibility with the generated form.
        return f"id={self.id} label={self.label} in_taxon={self.in_taxon} "

    def __repr__(self):
        return str(self)
class RnaProduct(GeneProduct):
    """An RNA gene product (no description supplied by the source model)."""

    def __init__(self, id=None, label=None, in_taxon=None):
        # Plain data holder: store each constructor argument as-is.
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        # Trailing space kept for byte-compatibility with the generated form.
        return f"id={self.id} label={self.label} in_taxon={self.in_taxon} "

    def __repr__(self):
        return str(self)
# NOTE(review): verbatim duplicate definition emitted by the code generator;
# it re-binds the name and supersedes the earlier identical copy.
class RnaProduct(GeneProduct):
    """An RNA gene product (no description supplied by the source model)."""

    def __init__(self, id=None, label=None, in_taxon=None):
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        # Trailing space kept for byte-compatibility with the generated form.
        return f"id={self.id} label={self.label} in_taxon={self.in_taxon} "

    def __repr__(self):
        return str(self)
class NoncodingRnaProduct(RnaProduct):
    """A non-coding RNA product (no description supplied by the source model)."""

    def __init__(self, id=None, label=None, in_taxon=None):
        # Plain data holder: store each constructor argument as-is.
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        # Trailing space kept for byte-compatibility with the generated form.
        return f"id={self.id} label={self.label} in_taxon={self.in_taxon} "

    def __repr__(self):
        return str(self)
# NOTE(review): verbatim duplicate definition emitted by the code generator;
# it re-binds the name and supersedes the earlier identical copy.
class NoncodingRnaProduct(RnaProduct):
    """A non-coding RNA product (no description supplied by the source model)."""

    def __init__(self, id=None, label=None, in_taxon=None):
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        # Trailing space kept for byte-compatibility with the generated form.
        return f"id={self.id} label={self.label} in_taxon={self.in_taxon} "

    def __repr__(self):
        return str(self)
class Microrna(NoncodingRnaProduct):
    """A microRNA (no description supplied by the source model)."""

    def __init__(self, id=None, label=None, in_taxon=None):
        # Plain data holder: store each constructor argument as-is.
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        # Trailing space kept for byte-compatibility with the generated form.
        return f"id={self.id} label={self.label} in_taxon={self.in_taxon} "

    def __repr__(self):
        return str(self)
class MacromolecularComplex(MolecularEntity):
    """A macromolecular complex (no description supplied by the source model)."""

    def __init__(self, id=None, label=None, in_taxon=None):
        # Plain data holder: store each constructor argument as-is.
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        # Trailing space kept for byte-compatibility with the generated form.
        return f"id={self.id} label={self.label} in_taxon={self.in_taxon} "

    def __repr__(self):
        return str(self)
class GeneGrouping(object):
    """Any grouping of multiple genes or gene products."""

    def __init__(self):
        # This class carries no state of its own.
        pass

    def __str__(self):
        return ""

    def __repr__(self):
        return str(self)
class GeneFamily(MolecularEntity):
    """Any grouping of multiple genes or gene products related by common
    descent."""

    def __init__(self, id=None, label=None, in_taxon=None):
        # Plain data holder: store each constructor argument as-is.
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        # Trailing space kept for byte-compatibility with the generated form.
        return f"id={self.id} label={self.label} in_taxon={self.in_taxon} "

    def __repr__(self):
        return str(self)
class Zygosity(Attribute):
    """A zygosity attribute (no description supplied by the source model)."""

    def __init__(self):
        # This class carries no state of its own.
        pass

    def __str__(self):
        return ""

    def __repr__(self):
        return str(self)
class Genotype(GenomicEntity):
    """An information content entity that describes a genome by specifying the
    total variation in genomic sequence and/or gene expression, relative to
    some established background."""

    def __init__(self, has_zygosity=None, id=None, label=None, in_taxon=None):
        # Plain data holder: store each constructor argument as-is.
        self.has_zygosity = has_zygosity
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        # Trailing space kept for byte-compatibility with the generated form.
        return (f"has_zygosity={self.has_zygosity} id={self.id} "
                f"label={self.label} in_taxon={self.in_taxon} ")

    def __repr__(self):
        return str(self)
# NOTE(review): verbatim duplicate definition emitted by the code generator;
# it re-binds the name and supersedes the earlier identical copy.
class Genotype(GenomicEntity):
    """An information content entity that describes a genome by specifying the
    total variation in genomic sequence and/or gene expression, relative to
    some established background."""

    def __init__(self, has_zygosity=None, id=None, label=None, in_taxon=None):
        self.has_zygosity = has_zygosity
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        # Trailing space kept for byte-compatibility with the generated form.
        return (f"has_zygosity={self.has_zygosity} id={self.id} "
                f"label={self.label} in_taxon={self.in_taxon} ")

    def __repr__(self):
        return str(self)
class Allele(Genotype):
    """A genomic feature representing one of a set of coexisting sequence
    variants at a particular genomic locus."""

    def __init__(self, has_gene=None, has_zygosity=None, id=None, label=None,
                 in_taxon=None):
        # Plain data holder: store each constructor argument as-is.
        self.has_gene = has_gene
        self.has_zygosity = has_zygosity
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        # Trailing space kept for byte-compatibility with the generated form.
        return (f"has_gene={self.has_gene} has_zygosity={self.has_zygosity} "
                f"id={self.id} label={self.label} in_taxon={self.in_taxon} ")

    def __repr__(self):
        return str(self)
class SequenceVariant(GenomicEntity):
    """A genomic feature representing one of a set of coexisting sequence
    variants at a particular genomic locus."""

    def __init__(self, id=None, label=None, in_taxon=None):
        # Plain data holder: store each constructor argument as-is.
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        # Trailing space kept for byte-compatibility with the generated form.
        return f"id={self.id} label={self.label} in_taxon={self.in_taxon} "

    def __repr__(self):
        return str(self)
class Environment(BiologicalEntity):
    """A feature of the environment of an organism that influences one or more
    phenotypic features of that organism, potentially mediated by genes."""

    def __init__(self, id=None, label=None):
        # Plain data holder: store each constructor argument as-is.
        self.id = id
        self.label = label

    def __str__(self):
        # Trailing space kept for byte-compatibility with the generated form.
        return f"id={self.id} label={self.label} "

    def __repr__(self):
        return str(self)
class DrugExposure(Environment):
    """A drug exposure is an intake of a particular chemical substance."""

    def __init__(self, drug=None, id=None, label=None):
        # Plain data holder: store each constructor argument as-is.
        self.drug = drug
        self.id = id
        self.label = label

    def __str__(self):
        # Trailing space kept for byte-compatibility with the generated form.
        return f"drug={self.drug} id={self.id} label={self.label} "

    def __repr__(self):
        return str(self)
class Treatment(Environment):
    """A treatment is targeted at a disease or phenotype and may involve
    multiple drug 'exposures'."""

    def __init__(self, treats=None, has_exposure_parts=None, id=None,
                 label=None):
        # Plain data holder: store each constructor argument as-is.
        self.treats = treats
        self.has_exposure_parts = has_exposure_parts
        self.id = id
        self.label = label

    def __str__(self):
        # Trailing space kept for byte-compatibility with the generated form.
        return (f"treats={self.treats} "
                f"has_exposure_parts={self.has_exposure_parts} "
                f"id={self.id} label={self.label} ")

    def __repr__(self):
        return str(self)
class GeographicLocation(PlanetaryEntity):
    """A location that can be described in lat/long coordinates."""

    def __init__(self, latitude=None, longitude=None, id=None, label=None):
        # Plain data holder: store each constructor argument as-is.
        self.latitude = latitude
        self.longitude = longitude
        self.id = id
        self.label = label

    def __str__(self):
        # Trailing space kept for byte-compatibility with the generated form.
        return (f"latitude={self.latitude} longitude={self.longitude} "
                f"id={self.id} label={self.label} ")

    def __repr__(self):
        return str(self)
class GeographicLocationAtTime(PlanetaryEntity):
    """A location that can be described in lat/long coordinates, for a
    particular time."""

    def __init__(self, latitude=None, longitude=None, timepoint=None, id=None,
                 label=None):
        # Plain data holder: store each constructor argument as-is.
        self.latitude = latitude
        self.longitude = longitude
        self.timepoint = timepoint
        self.id = id
        self.label = label

    def __str__(self):
        # Trailing space kept for byte-compatibility with the generated form.
        return (f"latitude={self.latitude} longitude={self.longitude} "
                f"timepoint={self.timepoint} id={self.id} label={self.label} ")

    def __repr__(self):
        return str(self)
class Association(InformationContentEntity):
    """A typed association between two entities, supported by evidence."""

    def __init__(self, association_type=None, subject=None, negated=None, relation=None, object=None, qualifiers=None, publications=None, provided_by=None, id=None, label=None, subject_extensions=None, object_extensions=None, has_evidence_graph=None, has_evidence_type=None, has_evidence=None, subject_taxon=None, subject_taxon_label=None, subject_taxon_closure=None, subject_taxon_closure_label=None, object_taxon=None, object_taxon_label=None, object_taxon_closure=None, object_taxon_closure_label=None):
        # Store every keyword argument on the instance under the same name.
        # (`id` and `object` shadow builtins, but the parameter names are part
        # of the generated schema interface and must stay.)
        for name, value in list(locals().items()):
            if name != "self":
                setattr(self, name, value)

    def __str__(self):
        # Render "field=value " pairs in declaration order; the trailing space
        # matches the generated original byte-for-byte.
        fields = ("association_type", "subject", "negated", "relation", "object", "qualifiers", "publications", "provided_by", "id", "label", "subject_extensions", "object_extensions", "has_evidence_graph", "has_evidence_type", "has_evidence", "subject_taxon", "subject_taxon_label", "subject_taxon_closure", "subject_taxon_closure_label", "object_taxon", "object_taxon_label", "object_taxon_closure", "object_taxon_closure_label")
        return "".join("{}={} ".format(f, getattr(self, f)) for f in fields)

    def __repr__(self):
        return str(self)
# NOTE(review): verbatim duplicate definition emitted by the code generator;
# it re-binds the name and supersedes the earlier identical copy.
class Association(InformationContentEntity):
    """A typed association between two entities, supported by evidence."""

    def __init__(self, association_type=None, subject=None, negated=None, relation=None, object=None, qualifiers=None, publications=None, provided_by=None, id=None, label=None, subject_extensions=None, object_extensions=None, has_evidence_graph=None, has_evidence_type=None, has_evidence=None, subject_taxon=None, subject_taxon_label=None, subject_taxon_closure=None, subject_taxon_closure_label=None, object_taxon=None, object_taxon_label=None, object_taxon_closure=None, object_taxon_closure_label=None):
        # Store every keyword argument on the instance under the same name.
        for name, value in list(locals().items()):
            if name != "self":
                setattr(self, name, value)

    def __str__(self):
        # Render "field=value " pairs in declaration order; the trailing space
        # matches the generated original byte-for-byte.
        fields = ("association_type", "subject", "negated", "relation", "object", "qualifiers", "publications", "provided_by", "id", "label", "subject_extensions", "object_extensions", "has_evidence_graph", "has_evidence_type", "has_evidence", "subject_taxon", "subject_taxon_label", "subject_taxon_closure", "subject_taxon_closure_label", "object_taxon", "object_taxon_label", "object_taxon_closure", "object_taxon_closure_label")
        return "".join("{}={} ".format(f, getattr(self, f)) for f in fields)

    def __repr__(self):
        return str(self)
class GenotypeToGenotypePartAssociation(Association):
    """Any association between one genotype and a genotypic entity that is a
    sub-component of it."""

    def __init__(self, association_type=None, subject=None, negated=None, relation=None, object=None, qualifiers=None, publications=None, provided_by=None, id=None, label=None, subject_extensions=None, object_extensions=None, has_evidence_graph=None, has_evidence_type=None, has_evidence=None, subject_taxon=None, subject_taxon_label=None, subject_taxon_closure=None, subject_taxon_closure_label=None, object_taxon=None, object_taxon_label=None, object_taxon_closure=None, object_taxon_closure_label=None):
        # Store every keyword argument on the instance under the same name.
        for name, value in list(locals().items()):
            if name != "self":
                setattr(self, name, value)

    def __str__(self):
        # Render "field=value " pairs in declaration order; the trailing space
        # matches the generated original byte-for-byte.
        fields = ("association_type", "subject", "negated", "relation", "object", "qualifiers", "publications", "provided_by", "id", "label", "subject_extensions", "object_extensions", "has_evidence_graph", "has_evidence_type", "has_evidence", "subject_taxon", "subject_taxon_label", "subject_taxon_closure", "subject_taxon_closure_label", "object_taxon", "object_taxon_label", "object_taxon_closure", "object_taxon_closure_label")
        return "".join("{}={} ".format(f, getattr(self, f)) for f in fields)

    def __repr__(self):
        return str(self)
class GenotypeToGeneAssociation(Association):
    """Any association between a genotype and a gene. The genotype may have
    multiple variants in that gene, or a single one; there is no assumption
    of cardinality."""

    def __init__(self, association_type=None, subject=None, negated=None, relation=None, object=None, qualifiers=None, publications=None, provided_by=None, id=None, label=None, subject_extensions=None, object_extensions=None, has_evidence_graph=None, has_evidence_type=None, has_evidence=None, subject_taxon=None, subject_taxon_label=None, subject_taxon_closure=None, subject_taxon_closure_label=None, object_taxon=None, object_taxon_label=None, object_taxon_closure=None, object_taxon_closure_label=None):
        # Store every keyword argument on the instance under the same name.
        for name, value in list(locals().items()):
            if name != "self":
                setattr(self, name, value)

    def __str__(self):
        # Render "field=value " pairs in declaration order; the trailing space
        # matches the generated original byte-for-byte.
        fields = ("association_type", "subject", "negated", "relation", "object", "qualifiers", "publications", "provided_by", "id", "label", "subject_extensions", "object_extensions", "has_evidence_graph", "has_evidence_type", "has_evidence", "subject_taxon", "subject_taxon_label", "subject_taxon_closure", "subject_taxon_closure_label", "object_taxon", "object_taxon_label", "object_taxon_closure", "object_taxon_closure_label")
        return "".join("{}={} ".format(f, getattr(self, f)) for f in fields)

    def __repr__(self):
        return str(self)
class GenotypeToVariantAssociation(Association):
    """Any association between a genotype and a sequence variant."""

    def __init__(self, association_type=None, subject=None, negated=None, relation=None, object=None, qualifiers=None, publications=None, provided_by=None, id=None, label=None, subject_extensions=None, object_extensions=None, has_evidence_graph=None, has_evidence_type=None, has_evidence=None, subject_taxon=None, subject_taxon_label=None, subject_taxon_closure=None, subject_taxon_closure_label=None, object_taxon=None, object_taxon_label=None, object_taxon_closure=None, object_taxon_closure_label=None):
        # Store every keyword argument on the instance under the same name.
        for name, value in list(locals().items()):
            if name != "self":
                setattr(self, name, value)

    def __str__(self):
        # Render "field=value " pairs in declaration order; the trailing space
        # matches the generated original byte-for-byte.
        fields = ("association_type", "subject", "negated", "relation", "object", "qualifiers", "publications", "provided_by", "id", "label", "subject_extensions", "object_extensions", "has_evidence_graph", "has_evidence_type", "has_evidence", "subject_taxon", "subject_taxon_label", "subject_taxon_closure", "subject_taxon_closure_label", "object_taxon", "object_taxon_label", "object_taxon_closure", "object_taxon_closure_label")
        return "".join("{}={} ".format(f, getattr(self, f)) for f in fields)

    def __repr__(self):
        return str(self)
class GeneToGeneAssociation(Association):
    """Abstract parent class for different kinds of gene-gene or gene product
    to gene product relationships; includes homology and interaction."""

    def __init__(self, association_type=None, subject=None, negated=None, relation=None, object=None, qualifiers=None, publications=None, provided_by=None, id=None, label=None, subject_extensions=None, object_extensions=None, has_evidence_graph=None, has_evidence_type=None, has_evidence=None, subject_taxon=None, subject_taxon_label=None, subject_taxon_closure=None, subject_taxon_closure_label=None, object_taxon=None, object_taxon_label=None, object_taxon_closure=None, object_taxon_closure_label=None):
        # Store every keyword argument on the instance under the same name.
        for name, value in list(locals().items()):
            if name != "self":
                setattr(self, name, value)

    def __str__(self):
        # Render "field=value " pairs in declaration order; the trailing space
        # matches the generated original byte-for-byte.
        fields = ("association_type", "subject", "negated", "relation", "object", "qualifiers", "publications", "provided_by", "id", "label", "subject_extensions", "object_extensions", "has_evidence_graph", "has_evidence_type", "has_evidence", "subject_taxon", "subject_taxon_label", "subject_taxon_closure", "subject_taxon_closure_label", "object_taxon", "object_taxon_label", "object_taxon_closure", "object_taxon_closure_label")
        return "".join("{}={} ".format(f, getattr(self, f)) for f in fields)

    def __repr__(self):
        return str(self)
# NOTE(review): verbatim duplicate definition emitted by the code generator;
# it re-binds the name and supersedes the earlier identical copy.
class GeneToGeneAssociation(Association):
    """Abstract parent class for different kinds of gene-gene or gene product
    to gene product relationships; includes homology and interaction."""

    def __init__(self, association_type=None, subject=None, negated=None, relation=None, object=None, qualifiers=None, publications=None, provided_by=None, id=None, label=None, subject_extensions=None, object_extensions=None, has_evidence_graph=None, has_evidence_type=None, has_evidence=None, subject_taxon=None, subject_taxon_label=None, subject_taxon_closure=None, subject_taxon_closure_label=None, object_taxon=None, object_taxon_label=None, object_taxon_closure=None, object_taxon_closure_label=None):
        # Store every keyword argument on the instance under the same name.
        for name, value in list(locals().items()):
            if name != "self":
                setattr(self, name, value)

    def __str__(self):
        # Render "field=value " pairs in declaration order; the trailing space
        # matches the generated original byte-for-byte.
        fields = ("association_type", "subject", "negated", "relation", "object", "qualifiers", "publications", "provided_by", "id", "label", "subject_extensions", "object_extensions", "has_evidence_graph", "has_evidence_type", "has_evidence", "subject_taxon", "subject_taxon_label", "subject_taxon_closure", "subject_taxon_closure_label", "object_taxon", "object_taxon_label", "object_taxon_closure", "object_taxon_closure_label")
        return "".join("{}={} ".format(f, getattr(self, f)) for f in fields)

    def __repr__(self):
        return str(self)
class GeneToGeneHomologyAssociation(GeneToGeneAssociation):
    """A homology association between two genes. May be orthology (in which
    case the species of subject and object should differ) or paralogy (in
    which case the species may be the same)."""

    def __init__(self, association_type=None, subject=None, negated=None, relation=None, object=None, qualifiers=None, publications=None, provided_by=None, id=None, label=None, subject_extensions=None, object_extensions=None, has_evidence_graph=None, has_evidence_type=None, has_evidence=None, subject_taxon=None, subject_taxon_label=None, subject_taxon_closure=None, subject_taxon_closure_label=None, object_taxon=None, object_taxon_label=None, object_taxon_closure=None, object_taxon_closure_label=None):
        # Store every keyword argument on the instance under the same name.
        for name, value in list(locals().items()):
            if name != "self":
                setattr(self, name, value)

    def __str__(self):
        # Render "field=value " pairs in declaration order; the trailing space
        # matches the generated original byte-for-byte.
        fields = ("association_type", "subject", "negated", "relation", "object", "qualifiers", "publications", "provided_by", "id", "label", "subject_extensions", "object_extensions", "has_evidence_graph", "has_evidence_type", "has_evidence", "subject_taxon", "subject_taxon_label", "subject_taxon_closure", "subject_taxon_closure_label", "object_taxon", "object_taxon_label", "object_taxon_closure", "object_taxon_closure_label")
        return "".join("{}={} ".format(f, getattr(self, f)) for f in fields)

    def __repr__(self):
        return str(self)
class MolecularInteraction(Association):
    """An interaction at the molecular level between two physical entities."""

    def __init__(self, association_type=None, subject=None, negated=None, relation=None, object=None, qualifiers=None, publications=None, provided_by=None, id=None, label=None, subject_extensions=None, object_extensions=None, has_evidence_graph=None, has_evidence_type=None, has_evidence=None, subject_taxon=None, subject_taxon_label=None, subject_taxon_closure=None, subject_taxon_closure_label=None, object_taxon=None, object_taxon_label=None, object_taxon_closure=None, object_taxon_closure_label=None):
        # Store every keyword argument on the instance under the same name.
        for name, value in list(locals().items()):
            if name != "self":
                setattr(self, name, value)

    def __str__(self):
        # Render "field=value " pairs in declaration order; the trailing space
        # matches the generated original byte-for-byte.
        fields = ("association_type", "subject", "negated", "relation", "object", "qualifiers", "publications", "provided_by", "id", "label", "subject_extensions", "object_extensions", "has_evidence_graph", "has_evidence_type", "has_evidence", "subject_taxon", "subject_taxon_label", "subject_taxon_closure", "subject_taxon_closure_label", "object_taxon", "object_taxon_label", "object_taxon_closure", "object_taxon_closure_label")
        return "".join("{}={} ".format(f, getattr(self, f)) for f in fields)

    def __repr__(self):
        return str(self)
class PairwiseGeneOrProteinInteractionAssociation(GeneToGeneAssociation):
    """An interaction between two genes or two gene products. May be physical
    (e.g. protein binding) or genetic (between genes); may be symmetric
    (e.g. protein interaction) or directed (e.g. phosphorylation)."""

    def __init__(self, association_type=None, subject=None, negated=None, relation=None, object=None, qualifiers=None, publications=None, provided_by=None, id=None, label=None, subject_extensions=None, object_extensions=None, has_evidence_graph=None, has_evidence_type=None, has_evidence=None, subject_taxon=None, subject_taxon_label=None, subject_taxon_closure=None, subject_taxon_closure_label=None, object_taxon=None, object_taxon_label=None, object_taxon_closure=None, object_taxon_closure_label=None):
        # Store every keyword argument on the instance under the same name.
        for name, value in list(locals().items()):
            if name != "self":
                setattr(self, name, value)

    def __str__(self):
        # Render "field=value " pairs in declaration order; the trailing space
        # matches the generated original byte-for-byte.
        fields = ("association_type", "subject", "negated", "relation", "object", "qualifiers", "publications", "provided_by", "id", "label", "subject_extensions", "object_extensions", "has_evidence_graph", "has_evidence_type", "has_evidence", "subject_taxon", "subject_taxon_label", "subject_taxon_closure", "subject_taxon_closure_label", "object_taxon", "object_taxon_label", "object_taxon_closure", "object_taxon_closure_label")
        return "".join("{}={} ".format(f, getattr(self, f)) for f in fields)

    def __repr__(self):
        return str(self)
class ChemicalToThingAssociation(Association):
    """
    An interaction between a chemical entity and another entity.
    """

    # Attribute names in declaration order; shared by __init__ and __str__.
    _FIELDS = (
        "association_type", "subject", "negated", "relation", "object",
        "qualifiers", "publications", "provided_by", "id", "label",
        "subject_extensions", "object_extensions", "has_evidence_graph",
        "has_evidence_type", "has_evidence", "subject_taxon",
        "subject_taxon_label", "subject_taxon_closure",
        "subject_taxon_closure_label", "object_taxon", "object_taxon_label",
        "object_taxon_closure", "object_taxon_closure_label",
    )

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        """Store every constructor argument as a same-named attribute."""
        args = locals()
        for name in self._FIELDS:
            setattr(self, name, args[name])

    def __str__(self):
        # Same "name=value " concatenation (trailing space included) that the
        # generator emitted as one large format() call.
        return "".join("{}={} ".format(name, getattr(self, name))
                       for name in self._FIELDS)

    def __repr__(self):
        return str(self)
class CaseToThingAssociation(Association):
    """
    An abstract association for use where the case is the subject.
    """

    # Attribute names in declaration order; shared by __init__ and __str__.
    _FIELDS = (
        "association_type", "subject", "negated", "relation", "object",
        "qualifiers", "publications", "provided_by", "id", "label",
        "subject_extensions", "object_extensions", "has_evidence_graph",
        "has_evidence_type", "has_evidence", "subject_taxon",
        "subject_taxon_label", "subject_taxon_closure",
        "subject_taxon_closure_label", "object_taxon", "object_taxon_label",
        "object_taxon_closure", "object_taxon_closure_label",
    )

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        """Store every constructor argument as a same-named attribute."""
        args = locals()
        for name in self._FIELDS:
            setattr(self, name, args[name])

    def __str__(self):
        # Same "name=value " concatenation (trailing space included) that the
        # generator emitted as one large format() call.
        return "".join("{}={} ".format(name, getattr(self, name))
                       for name in self._FIELDS)

    def __repr__(self):
        return str(self)
class ChemicalToGeneAssociation(Association):
    """
    An interaction between a chemical entity or substance and a gene or gene
    product. The chemical substance may be a drug with the gene being a
    target of the drug.
    """

    # Attribute names in declaration order; shared by __init__ and __str__.
    _FIELDS = (
        "association_type", "subject", "negated", "relation", "object",
        "qualifiers", "publications", "provided_by", "id", "label",
        "subject_extensions", "object_extensions", "has_evidence_graph",
        "has_evidence_type", "has_evidence", "subject_taxon",
        "subject_taxon_label", "subject_taxon_closure",
        "subject_taxon_closure_label", "object_taxon", "object_taxon_label",
        "object_taxon_closure", "object_taxon_closure_label",
    )

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        """Store every constructor argument as a same-named attribute."""
        args = locals()
        for name in self._FIELDS:
            setattr(self, name, args[name])

    def __str__(self):
        # Same "name=value " concatenation (trailing space included) that the
        # generator emitted as one large format() call.
        return "".join("{}={} ".format(name, getattr(self, name))
                       for name in self._FIELDS)

    def __repr__(self):
        return str(self)
class ChemicalToDiseaseOrPhenotypicFeatureAssociation(Association):
    """
    An interaction between a chemical entity and a phenotype or disease,
    where the presence of the chemical gives rise to or exacerbates the
    phenotype.
    """

    # Attribute names in declaration order; shared by __init__ and __str__.
    _FIELDS = (
        "association_type", "subject", "negated", "relation", "object",
        "qualifiers", "publications", "provided_by", "id", "label",
        "subject_extensions", "object_extensions", "has_evidence_graph",
        "has_evidence_type", "has_evidence", "subject_taxon",
        "subject_taxon_label", "subject_taxon_closure",
        "subject_taxon_closure_label", "object_taxon", "object_taxon_label",
        "object_taxon_closure", "object_taxon_closure_label",
    )

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        """Store every constructor argument as a same-named attribute."""
        args = locals()
        for name in self._FIELDS:
            setattr(self, name, args[name])

    def __str__(self):
        # Same "name=value " concatenation (trailing space included) that the
        # generator emitted as one large format() call.
        return "".join("{}={} ".format(name, getattr(self, name))
                       for name in self._FIELDS)

    def __repr__(self):
        return str(self)
class ChemicalToPathwayAssociation(Association):
    """
    An interaction between a chemical entity and a biological process or
    pathway.
    """

    # Attribute names in declaration order; shared by __init__ and __str__.
    _FIELDS = (
        "association_type", "subject", "negated", "relation", "object",
        "qualifiers", "publications", "provided_by", "id", "label",
        "subject_extensions", "object_extensions", "has_evidence_graph",
        "has_evidence_type", "has_evidence", "subject_taxon",
        "subject_taxon_label", "subject_taxon_closure",
        "subject_taxon_closure_label", "object_taxon", "object_taxon_label",
        "object_taxon_closure", "object_taxon_closure_label",
    )

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        """Store every constructor argument as a same-named attribute."""
        args = locals()
        for name in self._FIELDS:
            setattr(self, name, args[name])

    def __str__(self):
        # Same "name=value " concatenation (trailing space included) that the
        # generator emitted as one large format() call.
        return "".join("{}={} ".format(name, getattr(self, name))
                       for name in self._FIELDS)

    def __repr__(self):
        return str(self)
# NOTE(review): this class is an exact duplicate of the
# ChemicalToGeneAssociation defined earlier in this file; this later
# definition silently rebinds the name. Likely a code-generator artifact —
# consider deduplicating upstream.
class ChemicalToGeneAssociation(Association):
    """
    An interaction between a chemical entity or substance and a gene or gene
    product. The chemical substance may be a drug with the gene being a
    target of the drug.
    """

    # Attribute names in declaration order; shared by __init__ and __str__.
    _FIELDS = (
        "association_type", "subject", "negated", "relation", "object",
        "qualifiers", "publications", "provided_by", "id", "label",
        "subject_extensions", "object_extensions", "has_evidence_graph",
        "has_evidence_type", "has_evidence", "subject_taxon",
        "subject_taxon_label", "subject_taxon_closure",
        "subject_taxon_closure_label", "object_taxon", "object_taxon_label",
        "object_taxon_closure", "object_taxon_closure_label",
    )

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        """Store every constructor argument as a same-named attribute."""
        args = locals()
        for name in self._FIELDS:
            setattr(self, name, args[name])

    def __str__(self):
        # Same "name=value " concatenation (trailing space included) that the
        # generator emitted as one large format() call.
        return "".join("{}={} ".format(name, getattr(self, name))
                       for name in self._FIELDS)

    def __repr__(self):
        return str(self)
class BiosampleToThingAssociation(Association):
    """
    An association between a biosample and something.
    """

    # Attribute names in declaration order; shared by __init__ and __str__.
    _FIELDS = (
        "association_type", "subject", "negated", "relation", "object",
        "qualifiers", "publications", "provided_by", "id", "label",
        "subject_extensions", "object_extensions", "has_evidence_graph",
        "has_evidence_type", "has_evidence", "subject_taxon",
        "subject_taxon_label", "subject_taxon_closure",
        "subject_taxon_closure_label", "object_taxon", "object_taxon_label",
        "object_taxon_closure", "object_taxon_closure_label",
    )

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        """Store every constructor argument as a same-named attribute."""
        args = locals()
        for name in self._FIELDS:
            setattr(self, name, args[name])

    def __str__(self):
        # Same "name=value " concatenation (trailing space included) that the
        # generator emitted as one large format() call.
        return "".join("{}={} ".format(name, getattr(self, name))
                       for name in self._FIELDS)

    def __repr__(self):
        return str(self)
class BiosampleToDiseaseOrPhenotypicFeatureAssociation(Association):
    """
    An association between a biosample and a disease or phenotype.
    definitional: true
    """

    # Attribute names in declaration order; shared by __init__ and __str__.
    _FIELDS = (
        "association_type", "subject", "negated", "relation", "object",
        "qualifiers", "publications", "provided_by", "id", "label",
        "subject_extensions", "object_extensions", "has_evidence_graph",
        "has_evidence_type", "has_evidence", "subject_taxon",
        "subject_taxon_label", "subject_taxon_closure",
        "subject_taxon_closure_label", "object_taxon", "object_taxon_label",
        "object_taxon_closure", "object_taxon_closure_label",
    )

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        """Store every constructor argument as a same-named attribute."""
        args = locals()
        for name in self._FIELDS:
            setattr(self, name, args[name])

    def __str__(self):
        # Same "name=value " concatenation (trailing space included) that the
        # generator emitted as one large format() call.
        return "".join("{}={} ".format(name, getattr(self, name))
                       for name in self._FIELDS)

    def __repr__(self):
        return str(self)
class EntityToPhenotypicFeatureAssociation(Association):
    """
    Association to a phenotypic feature, carrying frequency/severity/onset/sex
    qualifier slots (no description is provided in the source model).
    """

    # Attribute names in declaration order; shared by __init__ and __str__.
    # Note the qualifier slots come FIRST for this class.
    _FIELDS = (
        "frequency_qualifier", "severity_qualifier", "onset_qualifier",
        "sex_qualifier", "association_type", "subject", "negated",
        "relation", "object", "qualifiers", "publications", "provided_by",
        "id", "label", "subject_extensions", "object_extensions",
        "has_evidence_graph", "has_evidence_type", "has_evidence",
        "subject_taxon", "subject_taxon_label", "subject_taxon_closure",
        "subject_taxon_closure_label", "object_taxon", "object_taxon_label",
        "object_taxon_closure", "object_taxon_closure_label",
    )

    def __init__(self, frequency_qualifier=None, severity_qualifier=None,
                 onset_qualifier=None, sex_qualifier=None,
                 association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        """Store every constructor argument as a same-named attribute."""
        args = locals()
        for name in self._FIELDS:
            setattr(self, name, args[name])

    def __str__(self):
        # Same "name=value " concatenation (trailing space included) that the
        # generator emitted as one large format() call.
        return "".join("{}={} ".format(name, getattr(self, name))
                       for name in self._FIELDS)

    def __repr__(self):
        return str(self)
class EntityToDiseaseAssociation(object):
    """
    Holds frequency/severity/onset qualifier values for a disease association
    (no description is provided in the source model).
    """

    # Attribute names in declaration order; shared by __init__ and __str__.
    _FIELDS = ("frequency_qualifier", "severity_qualifier", "onset_qualifier")

    def __init__(self,
                 frequency_qualifier=None,
                 severity_qualifier=None,
                 onset_qualifier=None):
        """Store every constructor argument as a same-named attribute."""
        args = locals()
        for name in self._FIELDS:
            setattr(self, name, args[name])

    def __str__(self):
        # Same "name=value " concatenation (trailing space included) that the
        # generator emitted as one format() call.
        return "".join("{}={} ".format(name, getattr(self, name))
                       for name in self._FIELDS)

    def __repr__(self):
        return str(self)
class ThingToDiseaseOrPhenotypicFeatureAssociation(Association):
    """
    Association whose object is a disease or phenotypic feature (no
    description is provided in the source model).
    """

    # Attribute names in declaration order; shared by __init__ and __str__.
    _FIELDS = (
        "association_type", "subject", "negated", "relation", "object",
        "qualifiers", "publications", "provided_by", "id", "label",
        "subject_extensions", "object_extensions", "has_evidence_graph",
        "has_evidence_type", "has_evidence", "subject_taxon",
        "subject_taxon_label", "subject_taxon_closure",
        "subject_taxon_closure_label", "object_taxon", "object_taxon_label",
        "object_taxon_closure", "object_taxon_closure_label",
    )

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        """Store every constructor argument as a same-named attribute."""
        args = locals()
        for name in self._FIELDS:
            setattr(self, name, args[name])

    def __str__(self):
        # Same "name=value " concatenation (trailing space included) that the
        # generator emitted as one large format() call.
        return "".join("{}={} ".format(name, getattr(self, name))
                       for name in self._FIELDS)

    def __repr__(self):
        return str(self)
class DiseaseToThingAssociation(Association):
    """
    Association whose subject is a disease (no description is provided in the
    source model).
    """

    # Attribute names in declaration order; shared by __init__ and __str__.
    _FIELDS = (
        "association_type", "subject", "negated", "relation", "object",
        "qualifiers", "publications", "provided_by", "id", "label",
        "subject_extensions", "object_extensions", "has_evidence_graph",
        "has_evidence_type", "has_evidence", "subject_taxon",
        "subject_taxon_label", "subject_taxon_closure",
        "subject_taxon_closure_label", "object_taxon", "object_taxon_label",
        "object_taxon_closure", "object_taxon_closure_label",
    )

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        """Store every constructor argument as a same-named attribute."""
        args = locals()
        for name in self._FIELDS:
            setattr(self, name, args[name])

    def __str__(self):
        # Same "name=value " concatenation (trailing space included) that the
        # generator emitted as one large format() call.
        return "".join("{}={} ".format(name, getattr(self, name))
                       for name in self._FIELDS)

    def __repr__(self):
        return str(self)
class GenotypeToPhenotypicFeatureAssociation(Association):
    """
    Any association between one genotype and a phenotypic feature, where
    having the genotype confers the phenotype, either in isolation or
    through environment.
    """

    # Attribute names in declaration order; shared by __init__ and __str__.
    # Note the qualifier slots come LAST for this class.
    _FIELDS = (
        "association_type", "subject", "negated", "relation", "object",
        "qualifiers", "publications", "provided_by", "id", "label",
        "subject_extensions", "object_extensions", "has_evidence_graph",
        "has_evidence_type", "has_evidence", "subject_taxon",
        "subject_taxon_label", "subject_taxon_closure",
        "subject_taxon_closure_label", "object_taxon", "object_taxon_label",
        "object_taxon_closure", "object_taxon_closure_label",
        "frequency_qualifier", "severity_qualifier", "onset_qualifier",
        "sex_qualifier",
    )

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None, frequency_qualifier=None,
                 severity_qualifier=None, onset_qualifier=None,
                 sex_qualifier=None):
        """Store every constructor argument as a same-named attribute."""
        args = locals()
        for name in self._FIELDS:
            setattr(self, name, args[name])

    def __str__(self):
        # Same "name=value " concatenation (trailing space included) that the
        # generator emitted as one large format() call.
        return "".join("{}={} ".format(name, getattr(self, name))
                       for name in self._FIELDS)

    def __repr__(self):
        return str(self)
class EnvironmentToPhenotypicFeatureAssociation(Association):
    """
    Any association between an environment and a phenotypic feature, where
    being in the environment influences the phenotype.
    """

    # Attribute names in declaration order; shared by __init__ and __str__.
    # Note the qualifier slots come LAST for this class.
    _FIELDS = (
        "association_type", "subject", "negated", "relation", "object",
        "qualifiers", "publications", "provided_by", "id", "label",
        "subject_extensions", "object_extensions", "has_evidence_graph",
        "has_evidence_type", "has_evidence", "subject_taxon",
        "subject_taxon_label", "subject_taxon_closure",
        "subject_taxon_closure_label", "object_taxon", "object_taxon_label",
        "object_taxon_closure", "object_taxon_closure_label",
        "frequency_qualifier", "severity_qualifier", "onset_qualifier",
        "sex_qualifier",
    )

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None, frequency_qualifier=None,
                 severity_qualifier=None, onset_qualifier=None,
                 sex_qualifier=None):
        """Store every constructor argument as a same-named attribute."""
        args = locals()
        for name in self._FIELDS:
            setattr(self, name, args[name])

    def __str__(self):
        # Same "name=value " concatenation (trailing space included) that the
        # generator emitted as one large format() call.
        return "".join("{}={} ".format(name, getattr(self, name))
                       for name in self._FIELDS)

    def __repr__(self):
        return str(self)
class DiseaseToPhenotypicFeatureAssociation(Association):
    """
    An association between a disease and a phenotypic feature in which the
    phenotypic feature is associated with the disease in some way.
    """
    # Field names in declaration order; drives both __init__ and __str__.
    _FIELDS = (
        'association_type', 'subject', 'negated', 'relation', 'object',
        'qualifiers', 'publications', 'provided_by', 'id', 'label',
        'subject_extensions', 'object_extensions', 'has_evidence_graph',
        'has_evidence_type', 'has_evidence', 'subject_taxon',
        'subject_taxon_label', 'subject_taxon_closure',
        'subject_taxon_closure_label', 'object_taxon', 'object_taxon_label',
        'object_taxon_closure', 'object_taxon_closure_label',
        'frequency_qualifier', 'severity_qualifier', 'onset_qualifier',
        'sex_qualifier')

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None, frequency_qualifier=None,
                 severity_qualifier=None, onset_qualifier=None,
                 sex_qualifier=None):
        # Mirror every constructor argument onto a same-named attribute,
        # in declaration order (matches the generator's explicit assignments).
        _values = dict(locals())
        for _field in self._FIELDS:
            setattr(self, _field, _values[_field])

    def __str__(self):
        # "name=value " pairs in declaration order; the trailing space is
        # kept for parity with the generator's original output.
        return ''.join('{}={} '.format(f, getattr(self, f))
                       for f in self._FIELDS)

    def __repr__(self):
        return str(self)
class CaseToPhenotypicFeatureAssociation(Association):
    """
    An association between a case (e.g. individual patient) and a phenotypic
    feature in which the individual has or has had the phenotype.
    """
    # Field names in declaration order; drives both __init__ and __str__.
    _FIELDS = (
        'association_type', 'subject', 'negated', 'relation', 'object',
        'qualifiers', 'publications', 'provided_by', 'id', 'label',
        'subject_extensions', 'object_extensions', 'has_evidence_graph',
        'has_evidence_type', 'has_evidence', 'subject_taxon',
        'subject_taxon_label', 'subject_taxon_closure',
        'subject_taxon_closure_label', 'object_taxon', 'object_taxon_label',
        'object_taxon_closure', 'object_taxon_closure_label',
        'frequency_qualifier', 'severity_qualifier', 'onset_qualifier',
        'sex_qualifier')

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None, frequency_qualifier=None,
                 severity_qualifier=None, onset_qualifier=None,
                 sex_qualifier=None):
        # Mirror every constructor argument onto a same-named attribute,
        # in declaration order (matches the generator's explicit assignments).
        _values = dict(locals())
        for _field in self._FIELDS:
            setattr(self, _field, _values[_field])

    def __str__(self):
        # "name=value " pairs in declaration order; the trailing space is
        # kept for parity with the generator's original output.
        return ''.join('{}={} '.format(f, getattr(self, f))
                       for f in self._FIELDS)

    def __repr__(self):
        return str(self)
class GeneToThingAssociation(Association):
    """
    Association whose subject is presumably a gene; the schema generator
    supplied no description for this class — confirm semantics against the
    source model.
    """
    # Field names in declaration order; drives both __init__ and __str__.
    _FIELDS = (
        'association_type', 'subject', 'negated', 'relation', 'object',
        'qualifiers', 'publications', 'provided_by', 'id', 'label',
        'subject_extensions', 'object_extensions', 'has_evidence_graph',
        'has_evidence_type', 'has_evidence', 'subject_taxon',
        'subject_taxon_label', 'subject_taxon_closure',
        'subject_taxon_closure_label', 'object_taxon', 'object_taxon_label',
        'object_taxon_closure', 'object_taxon_closure_label')

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Mirror every constructor argument onto a same-named attribute,
        # in declaration order (matches the generator's explicit assignments).
        _values = dict(locals())
        for _field in self._FIELDS:
            setattr(self, _field, _values[_field])

    def __str__(self):
        # "name=value " pairs in declaration order; the trailing space is
        # kept for parity with the generator's original output.
        return ''.join('{}={} '.format(f, getattr(self, f))
                       for f in self._FIELDS)

    def __repr__(self):
        return str(self)
class GeneToPhenotypicFeatureAssociation(Association):
    """
    Association between a gene and a phenotypic feature, as suggested by the
    class name; the schema generator supplied no description — confirm
    semantics against the source model.
    """
    # Field names in declaration order; drives both __init__ and __str__.
    _FIELDS = (
        'association_type', 'subject', 'negated', 'relation', 'object',
        'qualifiers', 'publications', 'provided_by', 'id', 'label',
        'subject_extensions', 'object_extensions', 'has_evidence_graph',
        'has_evidence_type', 'has_evidence', 'subject_taxon',
        'subject_taxon_label', 'subject_taxon_closure',
        'subject_taxon_closure_label', 'object_taxon', 'object_taxon_label',
        'object_taxon_closure', 'object_taxon_closure_label',
        'frequency_qualifier', 'severity_qualifier', 'onset_qualifier',
        'sex_qualifier')

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None, frequency_qualifier=None,
                 severity_qualifier=None, onset_qualifier=None,
                 sex_qualifier=None):
        # Mirror every constructor argument onto a same-named attribute,
        # in declaration order (matches the generator's explicit assignments).
        _values = dict(locals())
        for _field in self._FIELDS:
            setattr(self, _field, _values[_field])

    def __str__(self):
        # "name=value " pairs in declaration order; the trailing space is
        # kept for parity with the generator's original output.
        return ''.join('{}={} '.format(f, getattr(self, f))
                       for f in self._FIELDS)

    def __repr__(self):
        return str(self)
class GeneToDiseaseAssociation(Association):
    """
    Association between a gene and a disease, as suggested by the class name;
    the schema generator supplied no description — confirm semantics against
    the source model.

    NOTE(review): this generated module defines GeneToDiseaseAssociation a
    second time later in the file with an identical interface; that later
    definition replaces this one at import time, leaving this one dead.
    """
    # Field names in declaration order; drives both __init__ and __str__.
    _FIELDS = (
        'association_type', 'subject', 'negated', 'relation', 'object',
        'qualifiers', 'publications', 'provided_by', 'id', 'label',
        'subject_extensions', 'object_extensions', 'has_evidence_graph',
        'has_evidence_type', 'has_evidence', 'subject_taxon',
        'subject_taxon_label', 'subject_taxon_closure',
        'subject_taxon_closure_label', 'object_taxon', 'object_taxon_label',
        'object_taxon_closure', 'object_taxon_closure_label',
        'frequency_qualifier', 'severity_qualifier', 'onset_qualifier')

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None, frequency_qualifier=None,
                 severity_qualifier=None, onset_qualifier=None):
        # Mirror every constructor argument onto a same-named attribute,
        # in declaration order (matches the generator's explicit assignments).
        _values = dict(locals())
        for _field in self._FIELDS:
            setattr(self, _field, _values[_field])

    def __str__(self):
        # "name=value " pairs in declaration order; the trailing space is
        # kept for parity with the generator's original output.
        return ''.join('{}={} '.format(f, getattr(self, f))
                       for f in self._FIELDS)

    def __repr__(self):
        return str(self)
class ModelToDiseaseMixin(object):
    """
    Marker mixin for any association class whose subject plays the role of a
    'model'. Carries no state of its own.
    """

    def __init__(self):
        # Nothing to initialize; the mixin only tags the class.
        pass

    def __str__(self):
        # A field-less class renders as the empty string, matching the
        # generator's "".format() output.
        return ""

    def __repr__(self):
        return str(self)
class GeneToDiseaseAssociation(Association):
    """
    Association between a gene and a disease, as suggested by the class name;
    the schema generator supplied no description — confirm semantics against
    the source model.

    NOTE(review): duplicate definition — this generated module already defined
    an identical GeneToDiseaseAssociation earlier; this second definition is
    the one that takes effect at import time.
    """
    # Field names in declaration order; drives both __init__ and __str__.
    _FIELDS = (
        'association_type', 'subject', 'negated', 'relation', 'object',
        'qualifiers', 'publications', 'provided_by', 'id', 'label',
        'subject_extensions', 'object_extensions', 'has_evidence_graph',
        'has_evidence_type', 'has_evidence', 'subject_taxon',
        'subject_taxon_label', 'subject_taxon_closure',
        'subject_taxon_closure_label', 'object_taxon', 'object_taxon_label',
        'object_taxon_closure', 'object_taxon_closure_label',
        'frequency_qualifier', 'severity_qualifier', 'onset_qualifier')

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None, frequency_qualifier=None,
                 severity_qualifier=None, onset_qualifier=None):
        # Mirror every constructor argument onto a same-named attribute,
        # in declaration order (matches the generator's explicit assignments).
        _values = dict(locals())
        for _field in self._FIELDS:
            setattr(self, _field, _values[_field])

    def __str__(self):
        # "name=value " pairs in declaration order; the trailing space is
        # kept for parity with the generator's original output.
        return ''.join('{}={} '.format(f, getattr(self, f))
                       for f in self._FIELDS)

    def __repr__(self):
        return str(self)
class GeneAsAModelOfDiseaseAssociation(GeneToDiseaseAssociation):
    """
    Gene-to-disease association where, per the class name, the gene acts as a
    model of the disease; the schema generator supplied no description —
    confirm semantics against the source model.
    """
    # Field names in declaration order; drives both __init__ and __str__.
    _FIELDS = (
        'association_type', 'subject', 'negated', 'relation', 'object',
        'qualifiers', 'publications', 'provided_by', 'id', 'label',
        'subject_extensions', 'object_extensions', 'has_evidence_graph',
        'has_evidence_type', 'has_evidence', 'subject_taxon',
        'subject_taxon_label', 'subject_taxon_closure',
        'subject_taxon_closure_label', 'object_taxon', 'object_taxon_label',
        'object_taxon_closure', 'object_taxon_closure_label',
        'frequency_qualifier', 'severity_qualifier', 'onset_qualifier')

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None, frequency_qualifier=None,
                 severity_qualifier=None, onset_qualifier=None):
        # Mirror every constructor argument onto a same-named attribute,
        # in declaration order (matches the generator's explicit assignments).
        _values = dict(locals())
        for _field in self._FIELDS:
            setattr(self, _field, _values[_field])

    def __str__(self):
        # "name=value " pairs in declaration order; the trailing space is
        # kept for parity with the generator's original output.
        return ''.join('{}={} '.format(f, getattr(self, f))
                       for f in self._FIELDS)

    def __repr__(self):
        return str(self)
class GeneHasVariantThatContributesToDiseaseAssociation(GeneToDiseaseAssociation):
    """
    Gene-to-disease association where, per the class name, a variant of the
    gene contributes to the disease; the schema generator supplied no
    description — confirm semantics against the source model.
    """
    # Field names in declaration order; drives both __init__ and __str__.
    _FIELDS = (
        'association_type', 'subject', 'negated', 'relation', 'object',
        'qualifiers', 'publications', 'provided_by', 'id', 'label',
        'subject_extensions', 'object_extensions', 'has_evidence_graph',
        'has_evidence_type', 'has_evidence', 'subject_taxon',
        'subject_taxon_label', 'subject_taxon_closure',
        'subject_taxon_closure_label', 'object_taxon', 'object_taxon_label',
        'object_taxon_closure', 'object_taxon_closure_label',
        'frequency_qualifier', 'severity_qualifier', 'onset_qualifier')

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None, frequency_qualifier=None,
                 severity_qualifier=None, onset_qualifier=None):
        # Mirror every constructor argument onto a same-named attribute,
        # in declaration order (matches the generator's explicit assignments).
        _values = dict(locals())
        for _field in self._FIELDS:
            setattr(self, _field, _values[_field])

    def __str__(self):
        # "name=value " pairs in declaration order; the trailing space is
        # kept for parity with the generator's original output.
        return ''.join('{}={} '.format(f, getattr(self, f))
                       for f in self._FIELDS)

    def __repr__(self):
        return str(self)
class GenotypeToThingAssociation(Association):
    """
    Association whose subject is presumably a genotype; the schema generator
    supplied no description for this class — confirm semantics against the
    source model.
    """
    # Field names in declaration order; drives both __init__ and __str__.
    _FIELDS = (
        'association_type', 'subject', 'negated', 'relation', 'object',
        'qualifiers', 'publications', 'provided_by', 'id', 'label',
        'subject_extensions', 'object_extensions', 'has_evidence_graph',
        'has_evidence_type', 'has_evidence', 'subject_taxon',
        'subject_taxon_label', 'subject_taxon_closure',
        'subject_taxon_closure_label', 'object_taxon', 'object_taxon_label',
        'object_taxon_closure', 'object_taxon_closure_label')

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Mirror every constructor argument onto a same-named attribute,
        # in declaration order (matches the generator's explicit assignments).
        _values = dict(locals())
        for _field in self._FIELDS:
            setattr(self, _field, _values[_field])

    def __str__(self):
        # "name=value " pairs in declaration order; the trailing space is
        # kept for parity with the generator's original output.
        return ''.join('{}={} '.format(f, getattr(self, f))
                       for f in self._FIELDS)

    def __repr__(self):
        return str(self)
class GeneToExpressionSiteAssociation(Association):
    """
    An association between a gene and an expression site, possibly qualified
    by stage/timing info (stage_qualifier, quantifier_qualifier).
    """
    # Field names in declaration order; drives both __init__ and __str__.
    # Note: the two qualifier fields precede the common Association fields.
    _FIELDS = (
        'stage_qualifier', 'quantifier_qualifier',
        'association_type', 'subject', 'negated', 'relation', 'object',
        'qualifiers', 'publications', 'provided_by', 'id', 'label',
        'subject_extensions', 'object_extensions', 'has_evidence_graph',
        'has_evidence_type', 'has_evidence', 'subject_taxon',
        'subject_taxon_label', 'subject_taxon_closure',
        'subject_taxon_closure_label', 'object_taxon', 'object_taxon_label',
        'object_taxon_closure', 'object_taxon_closure_label')

    def __init__(self, stage_qualifier=None, quantifier_qualifier=None,
                 association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Mirror every constructor argument onto a same-named attribute,
        # in declaration order (matches the generator's explicit assignments).
        _values = dict(locals())
        for _field in self._FIELDS:
            setattr(self, _field, _values[_field])

    def __str__(self):
        # "name=value " pairs in declaration order; the trailing space is
        # kept for parity with the generator's original output.
        return ''.join('{}={} '.format(f, getattr(self, f))
                       for f in self._FIELDS)

    def __repr__(self):
        return str(self)
class SequenceVariantModulatesTreatmentAssociation(Association):
    """
    Association where, per the class name, a sequence variant modulates a
    treatment; the schema generator supplied no description — confirm
    semantics against the source model.
    """
    # Field names in declaration order; drives both __init__ and __str__.
    _FIELDS = (
        'association_type', 'subject', 'negated', 'relation', 'object',
        'qualifiers', 'publications', 'provided_by', 'id', 'label',
        'subject_extensions', 'object_extensions', 'has_evidence_graph',
        'has_evidence_type', 'has_evidence', 'subject_taxon',
        'subject_taxon_label', 'subject_taxon_closure',
        'subject_taxon_closure_label', 'object_taxon', 'object_taxon_label',
        'object_taxon_closure', 'object_taxon_closure_label')

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Mirror every constructor argument onto a same-named attribute,
        # in declaration order (matches the generator's explicit assignments).
        _values = dict(locals())
        for _field in self._FIELDS:
            setattr(self, _field, _values[_field])

    def __str__(self):
        # "name=value " pairs in declaration order; the trailing space is
        # kept for parity with the generator's original output.
        return ''.join('{}={} '.format(f, getattr(self, f))
                       for f in self._FIELDS)

    def __repr__(self):
        return str(self)
class GeneToGoTermAssociation(Association):
    """
    Association between a gene and a GO term, as suggested by the class name;
    the schema generator supplied no description — confirm semantics against
    the source model.
    """
    # Field names in declaration order; drives both __init__ and __str__.
    _FIELDS = (
        'association_type', 'subject', 'negated', 'relation', 'object',
        'qualifiers', 'publications', 'provided_by', 'id', 'label',
        'subject_extensions', 'object_extensions', 'has_evidence_graph',
        'has_evidence_type', 'has_evidence', 'subject_taxon',
        'subject_taxon_label', 'subject_taxon_closure',
        'subject_taxon_closure_label', 'object_taxon', 'object_taxon_label',
        'object_taxon_closure', 'object_taxon_closure_label')

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Mirror every constructor argument onto a same-named attribute,
        # in declaration order (matches the generator's explicit assignments).
        _values = dict(locals())
        for _field in self._FIELDS:
            setattr(self, _field, _values[_field])

    def __str__(self):
        # "name=value " pairs in declaration order; the trailing space is
        # kept for parity with the generator's original output.
        return ''.join('{}={} '.format(f, getattr(self, f))
                       for f in self._FIELDS)

    def __repr__(self):
        return str(self)
class AssociationResultSet(InformationContentEntity):
    """
    None
    """

    def __init__(self, associations=None, id=None, label=None):
        # `id` shadows the builtin; name kept for API compatibility.
        self.associations = associations
        self.id = id
        self.label = label

    def __str__(self):
        # One "name=value " segment per field, trailing space included.
        segments = ["associations={} ".format(self.associations),
                    "id={} ".format(self.id),
                    "label={} ".format(self.label)]
        return "".join(segments)

    def __repr__(self):
        return str(self)
class GenomicSequenceLocalization(Association):
    """
    A relationship between a sequence feature and an entity it is localized to. The reference entity may be a chromosome, chromosome region or information entity such as a contig
    """

    # Declared field order; drives both __init__ and __str__.
    _FIELDS = ("start_interbase_coordinate", "end_interbase_coordinate",
               "genome_build", "phase", "association_type", "subject",
               "negated", "relation", "object", "qualifiers", "publications",
               "provided_by", "id", "label", "subject_extensions",
               "object_extensions", "has_evidence_graph", "has_evidence_type",
               "has_evidence", "subject_taxon", "subject_taxon_label",
               "subject_taxon_closure", "subject_taxon_closure_label",
               "object_taxon", "object_taxon_label", "object_taxon_closure",
               "object_taxon_closure_label")

    def __init__(self, start_interbase_coordinate=None,
                 end_interbase_coordinate=None, genome_build=None, phase=None,
                 association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Copy every keyword argument onto the instance in declared order.
        # (`object` and `id` shadow builtins; names kept for API compatibility.)
        supplied = locals()
        for field in self._FIELDS:
            setattr(self, field, supplied[field])

    def __str__(self):
        # One "name=value " segment per field, trailing space included,
        # matching this module's generated formatting convention.
        return "".join("{}={} ".format(f, getattr(self, f))
                       for f in self._FIELDS)

    def __repr__(self):
        return str(self)
class SequenceFeatureRelationship(Association):
    """
    For example, a particular exon is part of a particular transcript or gene
    """

    # Declared field order; drives both __init__ and __str__.
    _FIELDS = ("association_type", "subject", "negated", "relation", "object",
               "qualifiers", "publications", "provided_by", "id", "label",
               "subject_extensions", "object_extensions",
               "has_evidence_graph", "has_evidence_type", "has_evidence",
               "subject_taxon", "subject_taxon_label",
               "subject_taxon_closure", "subject_taxon_closure_label",
               "object_taxon", "object_taxon_label", "object_taxon_closure",
               "object_taxon_closure_label")

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Copy every keyword argument onto the instance in declared order.
        # (`object` and `id` shadow builtins; names kept for API compatibility.)
        supplied = locals()
        for field in self._FIELDS:
            setattr(self, field, supplied[field])

    def __str__(self):
        # One "name=value " segment per field, trailing space included.
        return "".join("{}={} ".format(f, getattr(self, f))
                       for f in self._FIELDS)

    def __repr__(self):
        return str(self)
# NOTE(review): this is a verbatim duplicate of the SequenceFeatureRelationship
# definition immediately above; the later definition silently shadows the
# earlier one. Likely a code-generator artifact — confirm and deduplicate.
class SequenceFeatureRelationship(Association):
    """
    For example, a particular exon is part of a particular transcript or gene
    """

    # Declared field order; drives both __init__ and __str__.
    _FIELDS = ("association_type", "subject", "negated", "relation", "object",
               "qualifiers", "publications", "provided_by", "id", "label",
               "subject_extensions", "object_extensions",
               "has_evidence_graph", "has_evidence_type", "has_evidence",
               "subject_taxon", "subject_taxon_label",
               "subject_taxon_closure", "subject_taxon_closure_label",
               "object_taxon", "object_taxon_label", "object_taxon_closure",
               "object_taxon_closure_label")

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Copy every keyword argument onto the instance in declared order.
        # (`object` and `id` shadow builtins; names kept for API compatibility.)
        supplied = locals()
        for field in self._FIELDS:
            setattr(self, field, supplied[field])

    def __str__(self):
        # One "name=value " segment per field, trailing space included.
        return "".join("{}={} ".format(f, getattr(self, f))
                       for f in self._FIELDS)

    def __repr__(self):
        return str(self)
class TranscriptToGeneRelationship(SequenceFeatureRelationship):
    """
    A gene is a collection of transcripts
    """

    # Declared field order; drives both __init__ and __str__.
    _FIELDS = ("association_type", "subject", "negated", "relation", "object",
               "qualifiers", "publications", "provided_by", "id", "label",
               "subject_extensions", "object_extensions",
               "has_evidence_graph", "has_evidence_type", "has_evidence",
               "subject_taxon", "subject_taxon_label",
               "subject_taxon_closure", "subject_taxon_closure_label",
               "object_taxon", "object_taxon_label", "object_taxon_closure",
               "object_taxon_closure_label")

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Copy every keyword argument onto the instance in declared order.
        # (`object` and `id` shadow builtins; names kept for API compatibility.)
        supplied = locals()
        for field in self._FIELDS:
            setattr(self, field, supplied[field])

    def __str__(self):
        # One "name=value " segment per field, trailing space included.
        return "".join("{}={} ".format(f, getattr(self, f))
                       for f in self._FIELDS)

    def __repr__(self):
        return str(self)
class GeneToGeneProductRelationship(SequenceFeatureRelationship):
    """
    A gene is transcribed and potentially translated to a gene product
    """

    # Declared field order; drives both __init__ and __str__.
    _FIELDS = ("association_type", "subject", "negated", "relation", "object",
               "qualifiers", "publications", "provided_by", "id", "label",
               "subject_extensions", "object_extensions",
               "has_evidence_graph", "has_evidence_type", "has_evidence",
               "subject_taxon", "subject_taxon_label",
               "subject_taxon_closure", "subject_taxon_closure_label",
               "object_taxon", "object_taxon_label", "object_taxon_closure",
               "object_taxon_closure_label")

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Copy every keyword argument onto the instance in declared order.
        # (`object` and `id` shadow builtins; names kept for API compatibility.)
        supplied = locals()
        for field in self._FIELDS:
            setattr(self, field, supplied[field])

    def __str__(self):
        # One "name=value " segment per field, trailing space included.
        return "".join("{}={} ".format(f, getattr(self, f))
                       for f in self._FIELDS)

    def __repr__(self):
        return str(self)
class ExonToTranscriptRelationship(SequenceFeatureRelationship):
    """
    A transcript is formed from multiple exons
    """

    # Declared field order; drives both __init__ and __str__.
    _FIELDS = ("association_type", "subject", "negated", "relation", "object",
               "qualifiers", "publications", "provided_by", "id", "label",
               "subject_extensions", "object_extensions",
               "has_evidence_graph", "has_evidence_type", "has_evidence",
               "subject_taxon", "subject_taxon_label",
               "subject_taxon_closure", "subject_taxon_closure_label",
               "object_taxon", "object_taxon_label", "object_taxon_closure",
               "object_taxon_closure_label")

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Copy every keyword argument onto the instance in declared order.
        # (`object` and `id` shadow builtins; names kept for API compatibility.)
        supplied = locals()
        for field in self._FIELDS:
            setattr(self, field, supplied[field])

    def __str__(self):
        # One "name=value " segment per field, trailing space included.
        return "".join("{}={} ".format(f, getattr(self, f))
                       for f in self._FIELDS)

    def __repr__(self):
        return str(self)
class SequenceFeatureToSequenceRelationship(Association):
    """
    Relates a sequence feature such as a gene to its sequence
    """

    # Declared field order; drives both __init__ and __str__.
    _FIELDS = ("association_type", "subject", "negated", "relation", "object",
               "qualifiers", "publications", "provided_by", "id", "label",
               "subject_extensions", "object_extensions",
               "has_evidence_graph", "has_evidence_type", "has_evidence",
               "subject_taxon", "subject_taxon_label",
               "subject_taxon_closure", "subject_taxon_closure_label",
               "object_taxon", "object_taxon_label", "object_taxon_closure",
               "object_taxon_closure_label")

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Copy every keyword argument onto the instance in declared order.
        # (`object` and `id` shadow builtins; names kept for API compatibility.)
        supplied = locals()
        for field in self._FIELDS:
            setattr(self, field, supplied[field])

    def __str__(self):
        # One "name=value " segment per field, trailing space included.
        return "".join("{}={} ".format(f, getattr(self, f))
                       for f in self._FIELDS)

    def __repr__(self):
        return str(self)
class GeneRegulatoryRelationship(Association):
    """
    A regulatory relationship between two genes
    """

    # Declared field order; drives both __init__ and __str__.
    _FIELDS = ("association_type", "subject", "negated", "relation", "object",
               "qualifiers", "publications", "provided_by", "id", "label",
               "subject_extensions", "object_extensions",
               "has_evidence_graph", "has_evidence_type", "has_evidence",
               "subject_taxon", "subject_taxon_label",
               "subject_taxon_closure", "subject_taxon_closure_label",
               "object_taxon", "object_taxon_label", "object_taxon_closure",
               "object_taxon_closure_label")

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Copy every keyword argument onto the instance in declared order.
        # (`object` and `id` shadow builtins; names kept for API compatibility.)
        supplied = locals()
        for field in self._FIELDS:
            setattr(self, field, supplied[field])

    def __str__(self):
        # One "name=value " segment per field, trailing space included.
        return "".join("{}={} ".format(f, getattr(self, f))
                       for f in self._FIELDS)

    def __repr__(self):
        return str(self)
class AnatomicalEntityToAnatomicalEntityAssociation(Association):
    """
    None
    """

    # Declared field order; drives both __init__ and __str__.
    _FIELDS = ("association_type", "subject", "negated", "relation", "object",
               "qualifiers", "publications", "provided_by", "id", "label",
               "subject_extensions", "object_extensions",
               "has_evidence_graph", "has_evidence_type", "has_evidence",
               "subject_taxon", "subject_taxon_label",
               "subject_taxon_closure", "subject_taxon_closure_label",
               "object_taxon", "object_taxon_label", "object_taxon_closure",
               "object_taxon_closure_label")

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Copy every keyword argument onto the instance in declared order.
        # (`object` and `id` shadow builtins; names kept for API compatibility.)
        supplied = locals()
        for field in self._FIELDS:
            setattr(self, field, supplied[field])

    def __str__(self):
        # One "name=value " segment per field, trailing space included.
        return "".join("{}={} ".format(f, getattr(self, f))
                       for f in self._FIELDS)

    def __repr__(self):
        return str(self)
# NOTE(review): verbatim duplicate of the AnatomicalEntityToAnatomicalEntityAssociation
# definition immediately above; this later definition silently shadows the
# earlier one. Likely a code-generator artifact — confirm and deduplicate.
class AnatomicalEntityToAnatomicalEntityAssociation(Association):
    """
    None
    """

    # Declared field order; drives both __init__ and __str__.
    _FIELDS = ("association_type", "subject", "negated", "relation", "object",
               "qualifiers", "publications", "provided_by", "id", "label",
               "subject_extensions", "object_extensions",
               "has_evidence_graph", "has_evidence_type", "has_evidence",
               "subject_taxon", "subject_taxon_label",
               "subject_taxon_closure", "subject_taxon_closure_label",
               "object_taxon", "object_taxon_label", "object_taxon_closure",
               "object_taxon_closure_label")

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Copy every keyword argument onto the instance in declared order.
        # (`object` and `id` shadow builtins; names kept for API compatibility.)
        supplied = locals()
        for field in self._FIELDS:
            setattr(self, field, supplied[field])

    def __str__(self):
        # One "name=value " segment per field, trailing space included.
        return "".join("{}={} ".format(f, getattr(self, f))
                       for f in self._FIELDS)

    def __repr__(self):
        return str(self)
class AnatomicalEntityPartOfAnatomicalEntityAssociation(AnatomicalEntityToAnatomicalEntityAssociation):
    """
    None
    """

    # Declared field order; drives both __init__ and __str__.
    _FIELDS = ("association_type", "subject", "negated", "relation", "object",
               "qualifiers", "publications", "provided_by", "id", "label",
               "subject_extensions", "object_extensions",
               "has_evidence_graph", "has_evidence_type", "has_evidence",
               "subject_taxon", "subject_taxon_label",
               "subject_taxon_closure", "subject_taxon_closure_label",
               "object_taxon", "object_taxon_label", "object_taxon_closure",
               "object_taxon_closure_label")

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Copy every keyword argument onto the instance in declared order.
        # (`object` and `id` shadow builtins; names kept for API compatibility.)
        supplied = locals()
        for field in self._FIELDS:
            setattr(self, field, supplied[field])

    def __str__(self):
        # One "name=value " segment per field, trailing space included.
        return "".join("{}={} ".format(f, getattr(self, f))
                       for f in self._FIELDS)

    def __repr__(self):
        return str(self)
class Occurrent(object):
    """
    A processual entity
    """

    def __init__(self):
        # No fields of its own; placeholder in the generated hierarchy.
        pass

    def __str__(self):
        # Fixed: the original returned "".format(), a no-op call on an empty
        # template; the result is identical without it.
        return ""

    def __repr__(self):
        return self.__str__()
# NOTE(review): verbatim duplicate of the Occurrent definition immediately
# above; this later definition silently shadows the earlier one. Likely a
# code-generator artifact — confirm and deduplicate.
class Occurrent(object):
    """
    A processual entity
    """

    def __init__(self):
        # No fields of its own; placeholder in the generated hierarchy.
        pass

    def __str__(self):
        # Fixed: dropped the no-op "".format() call; the result is identical.
        return ""

    def __repr__(self):
        return self.__str__()
class MolecularActivity(Occurrent):
    """
    An execution of a molecular function
    """

    def __init__(self):
        # No fields of its own.
        pass

    def __str__(self):
        # Fixed: dropped the no-op "".format() call; the result is identical.
        return ""

    def __repr__(self):
        return self.__str__()
class BiologicalProcess(BiologicalEntity):
    """
    One or more causally connected executions of molecular functions
    """

    def __init__(self, id=None, label=None):
        # `id` shadows the builtin; name kept for API compatibility.
        self.id = id
        self.label = label

    def __str__(self):
        # Each field rendered as "name=value " with a trailing space.
        return "id={} ".format(self.id) + "label={} ".format(self.label)

    def __repr__(self):
        return str(self)
# NOTE(review): verbatim duplicate of the BiologicalProcess definition
# immediately above; this later definition silently shadows the earlier one.
# Likely a code-generator artifact — confirm and deduplicate.
class BiologicalProcess(BiologicalEntity):
    """
    One or more causally connected executions of molecular functions
    """

    def __init__(self, id=None, label=None):
        # `id` shadows the builtin; name kept for API compatibility.
        self.id = id
        self.label = label

    def __str__(self):
        # Each field rendered as "name=value " with a trailing space.
        return "id={} ".format(self.id) + "label={} ".format(self.label)

    def __repr__(self):
        return str(self)
class Pathway(BiologicalProcess):
    """
    None
    """

    def __init__(self, id=None, label=None):
        # `id` shadows the builtin; name kept for API compatibility.
        self.id = id
        self.label = label

    def __str__(self):
        # Each field rendered as "name=value " with a trailing space.
        return "id={} ".format(self.id) + "label={} ".format(self.label)

    def __repr__(self):
        return str(self)
class AnatomicalEntity(OrganismalEntity):
    """
    A subcellular location, cell type or gross anatomical part
    """

    def __init__(self, id=None, label=None, in_taxon=None):
        # `id` shadows the builtin; name kept for API compatibility.
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        # One "name=value " segment per field, trailing space included.
        pairs = (("id", self.id), ("label", self.label),
                 ("in_taxon", self.in_taxon))
        return "".join("{}={} ".format(k, v) for k, v in pairs)

    def __repr__(self):
        return str(self)
class CellularComponent(AnatomicalEntity):
    """
    A location in or around a cell
    """

    def __init__(self, id=None, label=None, in_taxon=None):
        # `id` shadows the builtin; name kept for API compatibility.
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        # One "name=value " segment per field, trailing space included.
        pairs = (("id", self.id), ("label", self.label),
                 ("in_taxon", self.in_taxon))
        return "".join("{}={} ".format(k, v) for k, v in pairs)

    def __repr__(self):
        return str(self)
class Cell(AnatomicalEntity):
    """
    None
    """

    def __init__(self, id=None, label=None, in_taxon=None):
        # `id` shadows the builtin; name kept for API compatibility.
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        # One "name=value " segment per field, trailing space included.
        pairs = (("id", self.id), ("label", self.label),
                 ("in_taxon", self.in_taxon))
        return "".join("{}={} ".format(k, v) for k, v in pairs)

    def __repr__(self):
        return str(self)
class GrossAnatomicalStructure(AnatomicalEntity):
    """
    None
    """

    def __init__(self, id=None, label=None, in_taxon=None):
        # `id` shadows the builtin; name kept for API compatibility.
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        # One "name=value " segment per field, trailing space included.
        pairs = (("id", self.id), ("label", self.label),
                 ("in_taxon", self.in_taxon))
        return "".join("{}={} ".format(k, v) for k, v in pairs)

    def __repr__(self):
        return str(self)
class NamedGraph(InformationContentEntity):
    """
    None
    """

    def __init__(self, id=None, label=None):
        # `id` shadows the builtin; name kept for API compatibility.
        self.id = id
        self.label = label

    def __str__(self):
        # Each field rendered as "name=value " with a trailing space.
        return "id={} ".format(self.id) + "label={} ".format(self.label)

    def __repr__(self):
        return str(self)
class PropertyValuePair(object):
    """
    None
    """

    def __init__(self, relation=None, filler=None):
        self.relation = relation
        self.filler = filler

    def __str__(self):
        # Each field rendered as "name=value " with a trailing space.
        pieces = ["relation={} ".format(self.relation),
                  "filler={} ".format(self.filler)]
        return "".join(pieces)

    def __repr__(self):
        return str(self)
class RelationshipType(object):
    """
    An OWL property used as an edge label
    """

    def __init__(self):
        # No fields of its own.
        pass

    def __str__(self):
        # Fixed: dropped the no-op "".format() call; the result is identical.
        return ""

    def __repr__(self):
        return self.__str__()
class PhenotypicSex(BiologicalSex):
    """
    An attribute corresponding to the phenotypic sex of the individual, based upon the reproductive organs present.
    """

    def __init__(self):
        # No fields of its own.
        pass

    def __str__(self):
        # Fixed: dropped the no-op "".format() call; the result is identical.
        return ""

    def __repr__(self):
        return self.__str__()
class GenotypicSex(BiologicalSex):
    """
    An attribute corresponding to the genotypic sex of the individual, based upon genotypic composition of sex chromosomes.
    """

    def __init__(self):
        # No fields of its own.
        pass

    def __str__(self):
        # Fixed: dropped the no-op "".format() call; the result is identical.
        return ""

    def __repr__(self):
        return self.__str__()
class SeverityValue(Attribute):
    """
    describes the severity of a phenotypic feature or disease
    """

    def __init__(self):
        # No fields of its own.
        pass

    def __str__(self):
        # Fixed: dropped the no-op "".format() call; the result is identical.
        return ""

    def __repr__(self):
        return self.__str__()
class FrequencyValue(Attribute):
    """
    describes the frequency of occurrence of an event or condition
    """

    def __init__(self):
        # No fields of its own.
        pass

    def __str__(self):
        # Fixed: dropped the no-op "".format() call; the result is identical.
        return ""

    def __repr__(self):
        return self.__str__()
class ClinicalModifier(Attribute):
    """
    Used to characterize and specify the phenotypic abnormalities defined in the Phenotypic abnormality subontology, with respect to severity, laterality, age of onset, and other aspects
    """

    def __init__(self):
        # No fields of its own.
        pass

    def __str__(self):
        # Fixed: dropped the no-op "".format() call; the result is identical.
        return ""

    def __repr__(self):
        return self.__str__()
class Onset(Attribute):
    """
    The age group in which manifestations appear
    """

    def __init__(self):
        # No fields of its own.
        pass

    def __str__(self):
        # Fixed: dropped the no-op "".format() call; the result is identical.
        return ""

    def __repr__(self):
        return self.__str__()
class GeneOntologyClass(OntologyClass):
    """
    an ontology class that describes a functional aspect of a gene, gene prodoct or complex
    """

    def __init__(self):
        # No fields of its own.
        pass

    def __str__(self):
        # Fixed: dropped the no-op "".format() call; the result is identical.
        return ""

    def __repr__(self):
        return self.__str__()
class ThingWithTaxon(object):
    """
    A mixin that can be used on any entity with a taxon.

    Holds a single slot, ``in_taxon``, surfaced verbatim by ``__str__``.
    """
    def __init__(self, in_taxon=None):
        self.in_taxon = in_taxon

    def __str__(self):
        pairs = (("in_taxon", self.in_taxon),)
        return "".join("{}={} ".format(key, value) for key, value in pairs)

    def __repr__(self):
        return self.__str__()
class OrganismTaxon(OrganismalEntity):
    """
    An organism taxon node, identified by ``id`` with a display ``label``.
    """
    def __init__(self, id=None, label=None):
        self.id = id
        self.label = label

    def __str__(self):
        pairs = (("id", self.id), ("label", self.label))
        return "".join("{}={} ".format(key, value) for key, value in pairs)

    def __repr__(self):
        return self.__str__()
class Case(IndividualOrganism):
    """
    An individual organism that has a patient role in some clinical context.
    """
    def __init__(self, id=None, label=None, in_taxon=None):
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        pairs = (("id", self.id), ("label", self.label), ("in_taxon", self.in_taxon))
        return "".join("{}={} ".format(key, value) for key, value in pairs)

    def __repr__(self):
        return self.__str__()
class Biosample(OrganismalEntity):
    """
    A biological sample entity with ``id``, ``label`` and originating ``in_taxon``.
    """
    def __init__(self, id=None, label=None, in_taxon=None):
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        pairs = (("id", self.id), ("label", self.label), ("in_taxon", self.in_taxon))
        return "".join("{}={} ".format(key, value) for key, value in pairs)

    def __repr__(self):
        return self.__str__()
class Disease(DiseaseOrPhenotypicFeature):
    """
    A disease entity with ``id``, ``label`` and associated ``in_taxon``.
    """
    def __init__(self, id=None, label=None, in_taxon=None):
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        pairs = (("id", self.id), ("label", self.label), ("in_taxon", self.in_taxon))
        return "".join("{}={} ".format(key, value) for key, value in pairs)

    def __repr__(self):
        return self.__str__()
class PhenotypicFeature(DiseaseOrPhenotypicFeature):
    """
    A phenotypic feature entity with ``id``, ``label`` and associated ``in_taxon``.
    """
    def __init__(self, id=None, label=None, in_taxon=None):
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        pairs = (("id", self.id), ("label", self.label), ("in_taxon", self.in_taxon))
        return "".join("{}={} ".format(key, value) for key, value in pairs)

    def __repr__(self):
        return self.__str__()
class ConfidenceLevel(InformationContentEntity):
    """
    Level of confidence in a statement.
    """
    def __init__(self, id=None, label=None):
        self.id = id
        self.label = label

    def __str__(self):
        pairs = (("id", self.id), ("label", self.label))
        return "".join("{}={} ".format(key, value) for key, value in pairs)

    def __repr__(self):
        return self.__str__()
class EvidenceType(InformationContentEntity):
    """
    Class of evidence that supports an association.
    """
    def __init__(self, id=None, label=None):
        self.id = id
        self.label = label

    def __str__(self):
        pairs = (("id", self.id), ("label", self.label))
        return "".join("{}={} ".format(key, value) for key, value in pairs)

    def __repr__(self):
        return self.__str__()
class Publication(InformationContentEntity):
    """
    Any published piece of information. Can refer to a whole publication, or to a part
    of it (e.g. a figure, figure legend, or section highlighted by NLP). The scope is
    intended to be general and include information published on the web as well as journals.
    """
    def __init__(self, id=None, label=None):
        self.id = id
        self.label = label

    def __str__(self):
        pairs = (("id", self.id), ("label", self.label))
        return "".join("{}={} ".format(key, value) for key, value in pairs)

    def __repr__(self):
        return self.__str__()
class Provider(AdministrativeEntity):
    """
    Person, group, organization or project that provides a piece of information.

    Generated placeholder class: carries no slots of its own.
    """
    def __init__(self):
        pass
    def __str__(self):
        # The generated ``"".format()`` was a no-op; return the empty string directly.
        return ""
    def __repr__(self):
        return self.__str__()
class ChemicalSubstance(MolecularEntity):
    """
    May be a chemical entity or a formulation with a chemical entity as active
    ingredient, or a complex material with multiple chemical entities as part.
    """
    def __init__(self, id=None, label=None, in_taxon=None):
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        pairs = (("id", self.id), ("label", self.label), ("in_taxon", self.in_taxon))
        return "".join("{}={} ".format(key, value) for key, value in pairs)

    def __repr__(self):
        return self.__str__()
class LifeStage(OrganismalEntity):
    """
    A stage of development or growth of an organism, including post-natal adult stages.
    """
    def __init__(self, id=None, label=None, in_taxon=None):
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        pairs = (("id", self.id), ("label", self.label), ("in_taxon", self.in_taxon))
        return "".join("{}={} ".format(key, value) for key, value in pairs)

    def __repr__(self):
        return self.__str__()
class EnvironmentalProcess(PlanetaryEntity):
    """
    An environmental process entity with ``id`` and ``label``.
    """
    def __init__(self, id=None, label=None):
        self.id = id
        self.label = label

    def __str__(self):
        pairs = (("id", self.id), ("label", self.label))
        return "".join("{}={} ".format(key, value) for key, value in pairs)

    def __repr__(self):
        return self.__str__()
class EnvironmentalFeature(PlanetaryEntity):
    """
    An environmental feature entity with ``id`` and ``label``.
    """
    def __init__(self, id=None, label=None):
        self.id = id
        self.label = label

    def __str__(self):
        pairs = (("id", self.id), ("label", self.label))
        return "".join("{}={} ".format(key, value) for key, value in pairs)

    def __repr__(self):
        return self.__str__()
class ClinicalTrial(ClinicalEntity):
    """
    A clinical trial entity with ``id`` and ``label``.
    """
    def __init__(self, id=None, label=None):
        self.id = id
        self.label = label

    def __str__(self):
        pairs = (("id", self.id), ("label", self.label))
        return "".join("{}={} ".format(key, value) for key, value in pairs)

    def __repr__(self):
        return self.__str__()
class ClinicalIntervention(ClinicalEntity):
    """
    A clinical intervention entity with ``id`` and ``label``.
    """
    def __init__(self, id=None, label=None):
        self.id = id
        self.label = label

    def __str__(self):
        pairs = (("id", self.id), ("label", self.label))
        return "".join("{}={} ".format(key, value) for key, value in pairs)

    def __repr__(self):
        return self.__str__()
class Genome(GenomicEntity):
    """
    A genome is the sum of genetic material within a cell or virion.
    """
    def __init__(self, id=None, label=None, in_taxon=None):
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        pairs = (("id", self.id), ("label", self.label), ("in_taxon", self.in_taxon))
        return "".join("{}={} ".format(key, value) for key, value in pairs)

    def __repr__(self):
        return self.__str__()
class Transcript(GenomicEntity):
    """
    An RNA synthesized on a DNA or RNA template by an RNA polymerase.
    """
    def __init__(self, id=None, label=None, in_taxon=None):
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        pairs = (("id", self.id), ("label", self.label), ("in_taxon", self.in_taxon))
        return "".join("{}={} ".format(key, value) for key, value in pairs)

    def __repr__(self):
        return self.__str__()
class Exon(GenomicEntity):
    """
    A region of the transcript sequence within a gene which is not removed from the
    primary RNA transcript by RNA splicing.
    """
    def __init__(self, id=None, label=None, in_taxon=None):
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        pairs = (("id", self.id), ("label", self.label), ("in_taxon", self.in_taxon))
        return "".join("{}={} ".format(key, value) for key, value in pairs)

    def __repr__(self):
        return self.__str__()
class CodingSequence(GenomicEntity):
    """
    A coding sequence entity with ``id``, ``label`` and ``in_taxon``.
    """
    def __init__(self, id=None, label=None, in_taxon=None):
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        pairs = (("id", self.id), ("label", self.label), ("in_taxon", self.in_taxon))
        return "".join("{}={} ".format(key, value) for key, value in pairs)

    def __repr__(self):
        return self.__str__()
class Gene(GeneOrGeneProduct):
"""
None
"""
def __init__(self,
id=None,
label=None,
in_taxon=None):
self.id=id
self.label=label
self.in_taxon=in_taxon
def __str__(self):
return "id={} label={} in_taxon={} ".format(self.id,self.label,self.in_taxon)
def __repr__(self):
return self.__str__()
class Protein(GeneProduct):
    """
    A protein entity with ``id``, ``label`` and ``in_taxon``.
    """
    def __init__(self, id=None, label=None, in_taxon=None):
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        pairs = (("id", self.id), ("label", self.label), ("in_taxon", self.in_taxon))
        return "".join("{}={} ".format(key, value) for key, value in pairs)

    def __repr__(self):
        return self.__str__()
class Microrna(NoncodingRnaProduct):
    """
    A microRNA entity with ``id``, ``label`` and ``in_taxon``.
    """
    def __init__(self, id=None, label=None, in_taxon=None):
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        pairs = (("id", self.id), ("label", self.label), ("in_taxon", self.in_taxon))
        return "".join("{}={} ".format(key, value) for key, value in pairs)

    def __repr__(self):
        return self.__str__()
class MacromolecularComplex(MolecularEntity):
    """
    A macromolecular complex entity with ``id``, ``label`` and ``in_taxon``.
    """
    def __init__(self, id=None, label=None, in_taxon=None):
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        pairs = (("id", self.id), ("label", self.label), ("in_taxon", self.in_taxon))
        return "".join("{}={} ".format(key, value) for key, value in pairs)

    def __repr__(self):
        return self.__str__()
class GeneGrouping(object):
    """
    Any grouping of multiple genes or gene products.

    Generated placeholder class: carries no slots of its own.
    """
    def __init__(self):
        pass
    def __str__(self):
        # The generated ``"".format()`` was a no-op; return the empty string directly.
        return ""
    def __repr__(self):
        return self.__str__()
class GeneFamily(MolecularEntity):
    """
    Any grouping of multiple genes or gene products related by common descent.
    """
    def __init__(self, id=None, label=None, in_taxon=None):
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        pairs = (("id", self.id), ("label", self.label), ("in_taxon", self.in_taxon))
        return "".join("{}={} ".format(key, value) for key, value in pairs)

    def __repr__(self):
        return self.__str__()
class Zygosity(Attribute):
    """
    The zygosity of a genomic feature.

    Generated placeholder class: carries no slots of its own.
    """
    def __init__(self):
        pass
    def __str__(self):
        # The generated ``"".format()`` was a no-op; return the empty string directly.
        return ""
    def __repr__(self):
        return self.__str__()
class Allele(Genotype):
    """
    A genomic feature representing one of a set of coexisting sequence variants at a
    particular genomic locus.
    """
    def __init__(self, has_gene=None, has_zygosity=None, id=None, label=None,
                 in_taxon=None):
        self.has_gene = has_gene
        self.has_zygosity = has_zygosity
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        pairs = (
            ("has_gene", self.has_gene),
            ("has_zygosity", self.has_zygosity),
            ("id", self.id),
            ("label", self.label),
            ("in_taxon", self.in_taxon),
        )
        return "".join("{}={} ".format(key, value) for key, value in pairs)

    def __repr__(self):
        return self.__str__()
class SequenceVariant(GenomicEntity):
    """
    A genomic feature representing one of a set of coexisting sequence variants at a
    particular genomic locus.
    """
    def __init__(self, id=None, label=None, in_taxon=None):
        self.id = id
        self.label = label
        self.in_taxon = in_taxon

    def __str__(self):
        pairs = (("id", self.id), ("label", self.label), ("in_taxon", self.in_taxon))
        return "".join("{}={} ".format(key, value) for key, value in pairs)

    def __repr__(self):
        return self.__str__()
class DrugExposure(Environment):
    """
    A drug exposure is an intake of a particular chemical substance.
    """
    def __init__(self, drug=None, id=None, label=None):
        self.drug = drug
        self.id = id
        self.label = label

    def __str__(self):
        pairs = (("drug", self.drug), ("id", self.id), ("label", self.label))
        return "".join("{}={} ".format(key, value) for key, value in pairs)

    def __repr__(self):
        return self.__str__()
class Treatment(Environment):
    """
    A treatment is targeted at a disease or phenotype and may involve multiple drug
    'exposures'.
    """
    def __init__(self, treats=None, has_exposure_parts=None, id=None, label=None):
        self.treats = treats
        self.has_exposure_parts = has_exposure_parts
        self.id = id
        self.label = label

    def __str__(self):
        pairs = (
            ("treats", self.treats),
            ("has_exposure_parts", self.has_exposure_parts),
            ("id", self.id),
            ("label", self.label),
        )
        return "".join("{}={} ".format(key, value) for key, value in pairs)

    def __repr__(self):
        return self.__str__()
class GeographicLocation(PlanetaryEntity):
    """
    A location that can be described in lat/long coordinates.
    """
    def __init__(self, latitude=None, longitude=None, id=None, label=None):
        self.latitude = latitude
        self.longitude = longitude
        self.id = id
        self.label = label

    def __str__(self):
        pairs = (
            ("latitude", self.latitude),
            ("longitude", self.longitude),
            ("id", self.id),
            ("label", self.label),
        )
        return "".join("{}={} ".format(key, value) for key, value in pairs)

    def __repr__(self):
        return self.__str__()
class GeographicLocationAtTime(PlanetaryEntity):
    """
    A location that can be described in lat/long coordinates, for a particular time.
    """
    def __init__(self, latitude=None, longitude=None, timepoint=None, id=None,
                 label=None):
        self.latitude = latitude
        self.longitude = longitude
        self.timepoint = timepoint
        self.id = id
        self.label = label

    def __str__(self):
        pairs = (
            ("latitude", self.latitude),
            ("longitude", self.longitude),
            ("timepoint", self.timepoint),
            ("id", self.id),
            ("label", self.label),
        )
        return "".join("{}={} ".format(key, value) for key, value in pairs)

    def __repr__(self):
        return self.__str__()
class GenotypeToGenotypePartAssociation(Association):
    """
    Any association between one genotype and a genotypic entity that is a
    sub-component of it.
    """
    # Slot names in declaration order; drives both __init__ assignment and __str__.
    _FIELDS = ("association_type", "subject", "negated", "relation", "object",
               "qualifiers", "publications", "provided_by", "id", "label",
               "subject_extensions", "object_extensions", "has_evidence_graph",
               "has_evidence_type", "has_evidence", "subject_taxon",
               "subject_taxon_label", "subject_taxon_closure",
               "subject_taxon_closure_label", "object_taxon", "object_taxon_label",
               "object_taxon_closure", "object_taxon_closure_label")

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None, publications=None,
                 provided_by=None, id=None, label=None, subject_extensions=None,
                 object_extensions=None, has_evidence_graph=None,
                 has_evidence_type=None, has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        values = locals()
        for name in self._FIELDS:
            setattr(self, name, values[name])

    def __str__(self):
        return "".join("{}={} ".format(name, getattr(self, name))
                       for name in self._FIELDS)

    def __repr__(self):
        return self.__str__()
class GenotypeToGeneAssociation(Association):
    """
    Any association between a genotype and a gene. The genotype may have multiple
    variants in that gene or a single one. There is no assumption of cardinality.
    """
    # Slot names in declaration order; drives both __init__ assignment and __str__.
    _FIELDS = ("association_type", "subject", "negated", "relation", "object",
               "qualifiers", "publications", "provided_by", "id", "label",
               "subject_extensions", "object_extensions", "has_evidence_graph",
               "has_evidence_type", "has_evidence", "subject_taxon",
               "subject_taxon_label", "subject_taxon_closure",
               "subject_taxon_closure_label", "object_taxon", "object_taxon_label",
               "object_taxon_closure", "object_taxon_closure_label")

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None, publications=None,
                 provided_by=None, id=None, label=None, subject_extensions=None,
                 object_extensions=None, has_evidence_graph=None,
                 has_evidence_type=None, has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        values = locals()
        for name in self._FIELDS:
            setattr(self, name, values[name])

    def __str__(self):
        return "".join("{}={} ".format(name, getattr(self, name))
                       for name in self._FIELDS)

    def __repr__(self):
        return self.__str__()
class GenotypeToVariantAssociation(Association):
    """
    Any association between a genotype and a sequence variant.
    """
    # Slot names in declaration order; drives both __init__ assignment and __str__.
    _FIELDS = ("association_type", "subject", "negated", "relation", "object",
               "qualifiers", "publications", "provided_by", "id", "label",
               "subject_extensions", "object_extensions", "has_evidence_graph",
               "has_evidence_type", "has_evidence", "subject_taxon",
               "subject_taxon_label", "subject_taxon_closure",
               "subject_taxon_closure_label", "object_taxon", "object_taxon_label",
               "object_taxon_closure", "object_taxon_closure_label")

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None, publications=None,
                 provided_by=None, id=None, label=None, subject_extensions=None,
                 object_extensions=None, has_evidence_graph=None,
                 has_evidence_type=None, has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        values = locals()
        for name in self._FIELDS:
            setattr(self, name, values[name])

    def __str__(self):
        return "".join("{}={} ".format(name, getattr(self, name))
                       for name in self._FIELDS)

    def __repr__(self):
        return self.__str__()
class GeneToGeneHomologyAssociation(GeneToGeneAssociation):
    """
    A homology association between two genes. May be orthology (in which case the
    species of subject and object should differ) or paralogy (in which case the
    species may be the same).
    """
    # Slot names in declaration order; drives both __init__ assignment and __str__.
    _FIELDS = ("association_type", "subject", "negated", "relation", "object",
               "qualifiers", "publications", "provided_by", "id", "label",
               "subject_extensions", "object_extensions", "has_evidence_graph",
               "has_evidence_type", "has_evidence", "subject_taxon",
               "subject_taxon_label", "subject_taxon_closure",
               "subject_taxon_closure_label", "object_taxon", "object_taxon_label",
               "object_taxon_closure", "object_taxon_closure_label")

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None, publications=None,
                 provided_by=None, id=None, label=None, subject_extensions=None,
                 object_extensions=None, has_evidence_graph=None,
                 has_evidence_type=None, has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        values = locals()
        for name in self._FIELDS:
            setattr(self, name, values[name])

    def __str__(self):
        return "".join("{}={} ".format(name, getattr(self, name))
                       for name in self._FIELDS)

    def __repr__(self):
        return self.__str__()
class MolecularInteraction(Association):
    """
    An interaction at the molecular level between two physical entities.
    """
    # Slot names in declaration order; drives both __init__ assignment and __str__.
    _FIELDS = ("association_type", "subject", "negated", "relation", "object",
               "qualifiers", "publications", "provided_by", "id", "label",
               "subject_extensions", "object_extensions", "has_evidence_graph",
               "has_evidence_type", "has_evidence", "subject_taxon",
               "subject_taxon_label", "subject_taxon_closure",
               "subject_taxon_closure_label", "object_taxon", "object_taxon_label",
               "object_taxon_closure", "object_taxon_closure_label")

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None, publications=None,
                 provided_by=None, id=None, label=None, subject_extensions=None,
                 object_extensions=None, has_evidence_graph=None,
                 has_evidence_type=None, has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        values = locals()
        for name in self._FIELDS:
            setattr(self, name, values[name])

    def __str__(self):
        return "".join("{}={} ".format(name, getattr(self, name))
                       for name in self._FIELDS)

    def __repr__(self):
        return self.__str__()
class PairwiseGeneOrProteinInteractionAssociation(GeneToGeneAssociation):
    """
    An interaction between two genes or two gene products. May be physical (e.g.
    protein binding) or genetic (between genes). May be symmetric (e.g. protein
    interaction) or directed (e.g. phosphorylation).
    """
    # Slot names in declaration order; drives both __init__ assignment and __str__.
    _FIELDS = ("association_type", "subject", "negated", "relation", "object",
               "qualifiers", "publications", "provided_by", "id", "label",
               "subject_extensions", "object_extensions", "has_evidence_graph",
               "has_evidence_type", "has_evidence", "subject_taxon",
               "subject_taxon_label", "subject_taxon_closure",
               "subject_taxon_closure_label", "object_taxon", "object_taxon_label",
               "object_taxon_closure", "object_taxon_closure_label")

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None, publications=None,
                 provided_by=None, id=None, label=None, subject_extensions=None,
                 object_extensions=None, has_evidence_graph=None,
                 has_evidence_type=None, has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        values = locals()
        for name in self._FIELDS:
            setattr(self, name, values[name])

    def __str__(self):
        return "".join("{}={} ".format(name, getattr(self, name))
                       for name in self._FIELDS)

    def __repr__(self):
        return self.__str__()
class ChemicalToThingAssociation(Association):
    """
    An interaction between a chemical entity and another entity.
    """
    # Slot names in declaration order; drives both __init__ assignment and __str__.
    _FIELDS = ("association_type", "subject", "negated", "relation", "object",
               "qualifiers", "publications", "provided_by", "id", "label",
               "subject_extensions", "object_extensions", "has_evidence_graph",
               "has_evidence_type", "has_evidence", "subject_taxon",
               "subject_taxon_label", "subject_taxon_closure",
               "subject_taxon_closure_label", "object_taxon", "object_taxon_label",
               "object_taxon_closure", "object_taxon_closure_label")

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None, publications=None,
                 provided_by=None, id=None, label=None, subject_extensions=None,
                 object_extensions=None, has_evidence_graph=None,
                 has_evidence_type=None, has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        values = locals()
        for name in self._FIELDS:
            setattr(self, name, values[name])

    def __str__(self):
        return "".join("{}={} ".format(name, getattr(self, name))
                       for name in self._FIELDS)

    def __repr__(self):
        return self.__str__()
class CaseToThingAssociation(Association):
    """
    An abstract association for use where the case is the subject.
    """
    # Slot names in declaration order; drives both __init__ assignment and __str__.
    _FIELDS = ("association_type", "subject", "negated", "relation", "object",
               "qualifiers", "publications", "provided_by", "id", "label",
               "subject_extensions", "object_extensions", "has_evidence_graph",
               "has_evidence_type", "has_evidence", "subject_taxon",
               "subject_taxon_label", "subject_taxon_closure",
               "subject_taxon_closure_label", "object_taxon", "object_taxon_label",
               "object_taxon_closure", "object_taxon_closure_label")

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None, publications=None,
                 provided_by=None, id=None, label=None, subject_extensions=None,
                 object_extensions=None, has_evidence_graph=None,
                 has_evidence_type=None, has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        values = locals()
        for name in self._FIELDS:
            setattr(self, name, values[name])

    def __str__(self):
        return "".join("{}={} ".format(name, getattr(self, name))
                       for name in self._FIELDS)

    def __repr__(self):
        return self.__str__()
class ChemicalToGeneAssociation(Association):
    """
    An interaction between a chemical entity or substance and a gene or gene product.
    The chemical substance may be a drug with the gene being a target of the drug.
    """
    # Slot names in declaration order; drives both __init__ assignment and __str__.
    _FIELDS = ("association_type", "subject", "negated", "relation", "object",
               "qualifiers", "publications", "provided_by", "id", "label",
               "subject_extensions", "object_extensions", "has_evidence_graph",
               "has_evidence_type", "has_evidence", "subject_taxon",
               "subject_taxon_label", "subject_taxon_closure",
               "subject_taxon_closure_label", "object_taxon", "object_taxon_label",
               "object_taxon_closure", "object_taxon_closure_label")

    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None, publications=None,
                 provided_by=None, id=None, label=None, subject_extensions=None,
                 object_extensions=None, has_evidence_graph=None,
                 has_evidence_type=None, has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        values = locals()
        for name in self._FIELDS:
            setattr(self, name, values[name])

    def __str__(self):
        return "".join("{}={} ".format(name, getattr(self, name))
                       for name in self._FIELDS)

    def __repr__(self):
        return self.__str__()
class ChemicalToDiseaseOrPhenotypicFeatureAssociation(Association):
    """An interaction between a chemical entity and a phenotype or disease,
    where the presence of the chemical gives rise to or exacerbates the
    phenotype.

    Every argument is optional (defaults to None) and is stored on the
    instance under the same name.
    """
    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Snapshot the arguments, then bind each to a same-named attribute.
        values = dict(vars())
        values.pop("self")
        for attr, val in values.items():
            setattr(self, attr, val)
    def __str__(self):
        # One "name=value " pair per slot, in declaration order; every pair
        # (including the last) is followed by a space, matching the
        # generated format string.
        fields = (
            "association_type", "subject", "negated", "relation", "object",
            "qualifiers", "publications", "provided_by", "id", "label",
            "subject_extensions", "object_extensions", "has_evidence_graph",
            "has_evidence_type", "has_evidence", "subject_taxon",
            "subject_taxon_label", "subject_taxon_closure",
            "subject_taxon_closure_label", "object_taxon",
            "object_taxon_label", "object_taxon_closure",
            "object_taxon_closure_label")
        return "".join("{}={} ".format(f, getattr(self, f)) for f in fields)
    def __repr__(self):
        return str(self)
class ChemicalToPathwayAssociation(Association):
    """An interaction between a chemical entity and a biological process or
    pathway.

    Every argument is optional (defaults to None) and is stored on the
    instance under the same name.
    """
    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Snapshot the arguments, then bind each to a same-named attribute.
        values = dict(vars())
        values.pop("self")
        for attr, val in values.items():
            setattr(self, attr, val)
    def __str__(self):
        # One "name=value " pair per slot, in declaration order, each
        # followed by a space (trailing space included, as generated).
        fields = (
            "association_type", "subject", "negated", "relation", "object",
            "qualifiers", "publications", "provided_by", "id", "label",
            "subject_extensions", "object_extensions", "has_evidence_graph",
            "has_evidence_type", "has_evidence", "subject_taxon",
            "subject_taxon_label", "subject_taxon_closure",
            "subject_taxon_closure_label", "object_taxon",
            "object_taxon_label", "object_taxon_closure",
            "object_taxon_closure_label")
        return "".join("{}={} ".format(f, getattr(self, f)) for f in fields)
    def __repr__(self):
        return str(self)
class ChemicalToGeneAssociation(Association):
    """An interaction between a chemical entity or substance and a gene or
    gene product. The chemical substance may be a drug with the gene being
    a target of the drug.

    Every argument is optional (defaults to None) and is stored on the
    instance under the same name.
    """
    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Snapshot the arguments, then bind each to a same-named attribute.
        values = dict(vars())
        values.pop("self")
        for attr, val in values.items():
            setattr(self, attr, val)
    def __str__(self):
        # One "name=value " pair per slot, in declaration order, each
        # followed by a space (trailing space included, as generated).
        fields = (
            "association_type", "subject", "negated", "relation", "object",
            "qualifiers", "publications", "provided_by", "id", "label",
            "subject_extensions", "object_extensions", "has_evidence_graph",
            "has_evidence_type", "has_evidence", "subject_taxon",
            "subject_taxon_label", "subject_taxon_closure",
            "subject_taxon_closure_label", "object_taxon",
            "object_taxon_label", "object_taxon_closure",
            "object_taxon_closure_label")
        return "".join("{}={} ".format(f, getattr(self, f)) for f in fields)
    def __repr__(self):
        return str(self)
class BiosampleToThingAssociation(Association):
    """An association between a biosample and something.

    Every argument is optional (defaults to None) and is stored on the
    instance under the same name.
    """
    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Snapshot the arguments, then bind each to a same-named attribute.
        values = dict(vars())
        values.pop("self")
        for attr, val in values.items():
            setattr(self, attr, val)
    def __str__(self):
        # One "name=value " pair per slot, in declaration order, each
        # followed by a space (trailing space included, as generated).
        fields = (
            "association_type", "subject", "negated", "relation", "object",
            "qualifiers", "publications", "provided_by", "id", "label",
            "subject_extensions", "object_extensions", "has_evidence_graph",
            "has_evidence_type", "has_evidence", "subject_taxon",
            "subject_taxon_label", "subject_taxon_closure",
            "subject_taxon_closure_label", "object_taxon",
            "object_taxon_label", "object_taxon_closure",
            "object_taxon_closure_label")
        return "".join("{}={} ".format(f, getattr(self, f)) for f in fields)
    def __repr__(self):
        return str(self)
class BiosampleToDiseaseOrPhenotypicFeatureAssociation(Association):
    """An association between a biosample and a disease or phenotype.

    definitional: true

    Every argument is optional (defaults to None) and is stored on the
    instance under the same name.
    """
    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Snapshot the arguments, then bind each to a same-named attribute.
        values = dict(vars())
        values.pop("self")
        for attr, val in values.items():
            setattr(self, attr, val)
    def __str__(self):
        # One "name=value " pair per slot, in declaration order, each
        # followed by a space (trailing space included, as generated).
        fields = (
            "association_type", "subject", "negated", "relation", "object",
            "qualifiers", "publications", "provided_by", "id", "label",
            "subject_extensions", "object_extensions", "has_evidence_graph",
            "has_evidence_type", "has_evidence", "subject_taxon",
            "subject_taxon_label", "subject_taxon_closure",
            "subject_taxon_closure_label", "object_taxon",
            "object_taxon_label", "object_taxon_closure",
            "object_taxon_closure_label")
        return "".join("{}={} ".format(f, getattr(self, f)) for f in fields)
    def __repr__(self):
        return str(self)
class EntityToPhenotypicFeatureAssociation(Association):
    """Association between some entity and a phenotypic feature, carrying
    frequency/severity/onset/sex qualifier slots in addition to the common
    association slots. (Schema description is empty.)

    Every argument is optional (defaults to None) and is stored on the
    instance under the same name.
    """
    def __init__(self, frequency_qualifier=None, severity_qualifier=None,
                 onset_qualifier=None, sex_qualifier=None,
                 association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Snapshot the arguments, then bind each to a same-named attribute.
        values = dict(vars())
        values.pop("self")
        for attr, val in values.items():
            setattr(self, attr, val)
    def __str__(self):
        # Qualifier slots come FIRST here, matching the parameter order.
        # One "name=value " pair per slot, each followed by a space
        # (trailing space included, as generated).
        fields = (
            "frequency_qualifier", "severity_qualifier", "onset_qualifier",
            "sex_qualifier",
            "association_type", "subject", "negated", "relation", "object",
            "qualifiers", "publications", "provided_by", "id", "label",
            "subject_extensions", "object_extensions", "has_evidence_graph",
            "has_evidence_type", "has_evidence", "subject_taxon",
            "subject_taxon_label", "subject_taxon_closure",
            "subject_taxon_closure_label", "object_taxon",
            "object_taxon_label", "object_taxon_closure",
            "object_taxon_closure_label")
        return "".join("{}={} ".format(f, getattr(self, f)) for f in fields)
    def __repr__(self):
        return str(self)
class EntityToDiseaseAssociation(object):
    """Holder for the frequency, severity and onset qualifier slots of an
    entity-to-disease association. (Schema description is empty.)

    Every argument is optional (defaults to None) and is stored on the
    instance under the same name.
    """
    def __init__(self, frequency_qualifier=None, severity_qualifier=None,
                 onset_qualifier=None):
        # Snapshot the arguments, then bind each to a same-named attribute.
        values = dict(vars())
        values.pop("self")
        for attr, val in values.items():
            setattr(self, attr, val)
    def __str__(self):
        # One "name=value " pair per slot, each followed by a space
        # (trailing space included, as generated).
        fields = ("frequency_qualifier", "severity_qualifier",
                  "onset_qualifier")
        return "".join("{}={} ".format(f, getattr(self, f)) for f in fields)
    def __repr__(self):
        return str(self)
class ThingToDiseaseOrPhenotypicFeatureAssociation(Association):
    """Association between some thing and a disease or phenotypic feature.
    (Schema description is empty.)

    Every argument is optional (defaults to None) and is stored on the
    instance under the same name.
    """
    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Snapshot the arguments, then bind each to a same-named attribute.
        values = dict(vars())
        values.pop("self")
        for attr, val in values.items():
            setattr(self, attr, val)
    def __str__(self):
        # One "name=value " pair per slot, in declaration order, each
        # followed by a space (trailing space included, as generated).
        fields = (
            "association_type", "subject", "negated", "relation", "object",
            "qualifiers", "publications", "provided_by", "id", "label",
            "subject_extensions", "object_extensions", "has_evidence_graph",
            "has_evidence_type", "has_evidence", "subject_taxon",
            "subject_taxon_label", "subject_taxon_closure",
            "subject_taxon_closure_label", "object_taxon",
            "object_taxon_label", "object_taxon_closure",
            "object_taxon_closure_label")
        return "".join("{}={} ".format(f, getattr(self, f)) for f in fields)
    def __repr__(self):
        return str(self)
class DiseaseToThingAssociation(Association):
    """Association between a disease and some thing. (Schema description is
    empty.)

    Every argument is optional (defaults to None) and is stored on the
    instance under the same name.
    """
    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Snapshot the arguments, then bind each to a same-named attribute.
        values = dict(vars())
        values.pop("self")
        for attr, val in values.items():
            setattr(self, attr, val)
    def __str__(self):
        # One "name=value " pair per slot, in declaration order, each
        # followed by a space (trailing space included, as generated).
        fields = (
            "association_type", "subject", "negated", "relation", "object",
            "qualifiers", "publications", "provided_by", "id", "label",
            "subject_extensions", "object_extensions", "has_evidence_graph",
            "has_evidence_type", "has_evidence", "subject_taxon",
            "subject_taxon_label", "subject_taxon_closure",
            "subject_taxon_closure_label", "object_taxon",
            "object_taxon_label", "object_taxon_closure",
            "object_taxon_closure_label")
        return "".join("{}={} ".format(f, getattr(self, f)) for f in fields)
    def __repr__(self):
        return str(self)
class GenotypeToPhenotypicFeatureAssociation(Association):
    """Any association between one genotype and a phenotypic feature, where
    having the genotype confers the phenotype, either in isolation or
    through environment.

    Every argument is optional (defaults to None) and is stored on the
    instance under the same name.
    """
    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None, frequency_qualifier=None,
                 severity_qualifier=None, onset_qualifier=None,
                 sex_qualifier=None):
        # Snapshot the arguments, then bind each to a same-named attribute.
        values = dict(vars())
        values.pop("self")
        for attr, val in values.items():
            setattr(self, attr, val)
    def __str__(self):
        # Qualifier slots come LAST here, matching the parameter order.
        # One "name=value " pair per slot, each followed by a space
        # (trailing space included, as generated).
        fields = (
            "association_type", "subject", "negated", "relation", "object",
            "qualifiers", "publications", "provided_by", "id", "label",
            "subject_extensions", "object_extensions", "has_evidence_graph",
            "has_evidence_type", "has_evidence", "subject_taxon",
            "subject_taxon_label", "subject_taxon_closure",
            "subject_taxon_closure_label", "object_taxon",
            "object_taxon_label", "object_taxon_closure",
            "object_taxon_closure_label",
            "frequency_qualifier", "severity_qualifier", "onset_qualifier",
            "sex_qualifier")
        return "".join("{}={} ".format(f, getattr(self, f)) for f in fields)
    def __repr__(self):
        return str(self)
class EnvironmentToPhenotypicFeatureAssociation(Association):
    """Any association between an environment and a phenotypic feature,
    where being in the environment influences the phenotype.

    Every argument is optional (defaults to None) and is stored on the
    instance under the same name.
    """
    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None, frequency_qualifier=None,
                 severity_qualifier=None, onset_qualifier=None,
                 sex_qualifier=None):
        # Snapshot the arguments, then bind each to a same-named attribute.
        values = dict(vars())
        values.pop("self")
        for attr, val in values.items():
            setattr(self, attr, val)
    def __str__(self):
        # Qualifier slots come LAST here, matching the parameter order.
        # One "name=value " pair per slot, each followed by a space
        # (trailing space included, as generated).
        fields = (
            "association_type", "subject", "negated", "relation", "object",
            "qualifiers", "publications", "provided_by", "id", "label",
            "subject_extensions", "object_extensions", "has_evidence_graph",
            "has_evidence_type", "has_evidence", "subject_taxon",
            "subject_taxon_label", "subject_taxon_closure",
            "subject_taxon_closure_label", "object_taxon",
            "object_taxon_label", "object_taxon_closure",
            "object_taxon_closure_label",
            "frequency_qualifier", "severity_qualifier", "onset_qualifier",
            "sex_qualifier")
        return "".join("{}={} ".format(f, getattr(self, f)) for f in fields)
    def __repr__(self):
        return str(self)
class DiseaseToPhenotypicFeatureAssociation(Association):
    """An association between a disease and a phenotypic feature in which
    the phenotypic feature is associated with the disease in some way.

    Every argument is optional (defaults to None) and is stored on the
    instance under the same name.
    """
    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None, frequency_qualifier=None,
                 severity_qualifier=None, onset_qualifier=None,
                 sex_qualifier=None):
        # Snapshot the arguments, then bind each to a same-named attribute.
        values = dict(vars())
        values.pop("self")
        for attr, val in values.items():
            setattr(self, attr, val)
    def __str__(self):
        # Qualifier slots come LAST here, matching the parameter order.
        # One "name=value " pair per slot, each followed by a space
        # (trailing space included, as generated).
        fields = (
            "association_type", "subject", "negated", "relation", "object",
            "qualifiers", "publications", "provided_by", "id", "label",
            "subject_extensions", "object_extensions", "has_evidence_graph",
            "has_evidence_type", "has_evidence", "subject_taxon",
            "subject_taxon_label", "subject_taxon_closure",
            "subject_taxon_closure_label", "object_taxon",
            "object_taxon_label", "object_taxon_closure",
            "object_taxon_closure_label",
            "frequency_qualifier", "severity_qualifier", "onset_qualifier",
            "sex_qualifier")
        return "".join("{}={} ".format(f, getattr(self, f)) for f in fields)
    def __repr__(self):
        return str(self)
class CaseToPhenotypicFeatureAssociation(Association):
    """An association between a case (e.g. individual patient) and a
    phenotypic feature in which the individual has or has had the
    phenotype.

    Every argument is optional (defaults to None) and is stored on the
    instance under the same name.
    """
    def __init__(self, association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None, frequency_qualifier=None,
                 severity_qualifier=None, onset_qualifier=None,
                 sex_qualifier=None):
        # Snapshot the arguments, then bind each to a same-named attribute.
        values = dict(vars())
        values.pop("self")
        for attr, val in values.items():
            setattr(self, attr, val)
    def __str__(self):
        # Qualifier slots come LAST here, matching the parameter order.
        # One "name=value " pair per slot, each followed by a space
        # (trailing space included, as generated).
        fields = (
            "association_type", "subject", "negated", "relation", "object",
            "qualifiers", "publications", "provided_by", "id", "label",
            "subject_extensions", "object_extensions", "has_evidence_graph",
            "has_evidence_type", "has_evidence", "subject_taxon",
            "subject_taxon_label", "subject_taxon_closure",
            "subject_taxon_closure_label", "object_taxon",
            "object_taxon_label", "object_taxon_closure",
            "object_taxon_closure_label",
            "frequency_qualifier", "severity_qualifier", "onset_qualifier",
            "sex_qualifier")
        return "".join("{}={} ".format(f, getattr(self, f)) for f in fields)
    def __repr__(self):
        return str(self)
class GeneToThingAssociation(Association):
    """
    Generated value-holder association class whose subject is a gene.

    Every constructor argument is optional and is stored verbatim as a
    same-named instance attribute.
    """
    # NOTE(review): parameters `object` and `id` shadow builtins, but they are
    # part of the generated public interface and must be kept.
    def __init__(self,
                 association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Copy every argument onto the instance under the same name.
        params = dict(locals())
        del params['self']
        for attr, value in params.items():
            setattr(self, attr, value)
    def __str__(self):
        # Render "name=value " pairs in declaration order; the trailing space
        # is kept for byte-compatibility with the generated formatting.
        return "".join(
            "{}={} ".format(attr, getattr(self, attr))
            for attr in (
                'association_type', 'subject', 'negated', 'relation',
                'object', 'qualifiers', 'publications', 'provided_by',
                'id', 'label', 'subject_extensions', 'object_extensions',
                'has_evidence_graph', 'has_evidence_type', 'has_evidence',
                'subject_taxon', 'subject_taxon_label',
                'subject_taxon_closure', 'subject_taxon_closure_label',
                'object_taxon', 'object_taxon_label',
                'object_taxon_closure', 'object_taxon_closure_label'))
    def __repr__(self):
        return str(self)
class GeneToPhenotypicFeatureAssociation(Association):
    """
    Generated value-holder association between a gene and a phenotypic
    feature, extended with frequency/severity/onset/sex qualifiers.

    Every constructor argument is optional and is stored verbatim as a
    same-named instance attribute.
    """
    # NOTE(review): parameters `object` and `id` shadow builtins, but they are
    # part of the generated public interface and must be kept.
    def __init__(self,
                 association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None, frequency_qualifier=None,
                 severity_qualifier=None, onset_qualifier=None,
                 sex_qualifier=None):
        # Copy every argument onto the instance under the same name.
        params = dict(locals())
        del params['self']
        for attr, value in params.items():
            setattr(self, attr, value)
    def __str__(self):
        # Render "name=value " pairs in declaration order; the trailing space
        # is kept for byte-compatibility with the generated formatting.
        return "".join(
            "{}={} ".format(attr, getattr(self, attr))
            for attr in (
                'association_type', 'subject', 'negated', 'relation',
                'object', 'qualifiers', 'publications', 'provided_by',
                'id', 'label', 'subject_extensions', 'object_extensions',
                'has_evidence_graph', 'has_evidence_type', 'has_evidence',
                'subject_taxon', 'subject_taxon_label',
                'subject_taxon_closure', 'subject_taxon_closure_label',
                'object_taxon', 'object_taxon_label',
                'object_taxon_closure', 'object_taxon_closure_label',
                'frequency_qualifier', 'severity_qualifier',
                'onset_qualifier', 'sex_qualifier'))
    def __repr__(self):
        return str(self)
class ModelToDiseaseMixin(object):
    """
    This mixin is used for any association class for which the subject plays
    the role of a 'model'.

    It carries no state: the constructor takes no arguments, and the string
    form is empty.
    """
    def __init__(self):
        # Marker mixin: nothing to initialize.
        pass
    def __str__(self):
        # No fields to render; "".format() in the generated code is just "".
        return ""
    def __repr__(self):
        return str(self)
class GeneAsAModelOfDiseaseAssociation(GeneToDiseaseAssociation):
    """
    Generated value-holder association in which a gene acts as a model of a
    disease, extended with frequency/severity/onset qualifiers.

    Every constructor argument is optional and is stored verbatim as a
    same-named instance attribute.
    """
    # NOTE(review): parameters `object` and `id` shadow builtins, but they are
    # part of the generated public interface and must be kept.
    def __init__(self,
                 association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None, frequency_qualifier=None,
                 severity_qualifier=None, onset_qualifier=None):
        # Copy every argument onto the instance under the same name.
        params = dict(locals())
        del params['self']
        for attr, value in params.items():
            setattr(self, attr, value)
    def __str__(self):
        # Render "name=value " pairs in declaration order; the trailing space
        # is kept for byte-compatibility with the generated formatting.
        return "".join(
            "{}={} ".format(attr, getattr(self, attr))
            for attr in (
                'association_type', 'subject', 'negated', 'relation',
                'object', 'qualifiers', 'publications', 'provided_by',
                'id', 'label', 'subject_extensions', 'object_extensions',
                'has_evidence_graph', 'has_evidence_type', 'has_evidence',
                'subject_taxon', 'subject_taxon_label',
                'subject_taxon_closure', 'subject_taxon_closure_label',
                'object_taxon', 'object_taxon_label',
                'object_taxon_closure', 'object_taxon_closure_label',
                'frequency_qualifier', 'severity_qualifier',
                'onset_qualifier'))
    def __repr__(self):
        return str(self)
class GeneHasVariantThatContributesToDiseaseAssociation(GeneToDiseaseAssociation):
    """
    Generated value-holder association in which a gene has a variant that
    contributes to a disease, extended with frequency/severity/onset
    qualifiers.

    Every constructor argument is optional and is stored verbatim as a
    same-named instance attribute.
    """
    # NOTE(review): parameters `object` and `id` shadow builtins, but they are
    # part of the generated public interface and must be kept.
    def __init__(self,
                 association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None, frequency_qualifier=None,
                 severity_qualifier=None, onset_qualifier=None):
        # Copy every argument onto the instance under the same name.
        params = dict(locals())
        del params['self']
        for attr, value in params.items():
            setattr(self, attr, value)
    def __str__(self):
        # Render "name=value " pairs in declaration order; the trailing space
        # is kept for byte-compatibility with the generated formatting.
        return "".join(
            "{}={} ".format(attr, getattr(self, attr))
            for attr in (
                'association_type', 'subject', 'negated', 'relation',
                'object', 'qualifiers', 'publications', 'provided_by',
                'id', 'label', 'subject_extensions', 'object_extensions',
                'has_evidence_graph', 'has_evidence_type', 'has_evidence',
                'subject_taxon', 'subject_taxon_label',
                'subject_taxon_closure', 'subject_taxon_closure_label',
                'object_taxon', 'object_taxon_label',
                'object_taxon_closure', 'object_taxon_closure_label',
                'frequency_qualifier', 'severity_qualifier',
                'onset_qualifier'))
    def __repr__(self):
        return str(self)
class GenotypeToThingAssociation(Association):
    """
    Generated value-holder association class whose subject is a genotype.

    Every constructor argument is optional and is stored verbatim as a
    same-named instance attribute.
    """
    # NOTE(review): parameters `object` and `id` shadow builtins, but they are
    # part of the generated public interface and must be kept.
    def __init__(self,
                 association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Copy every argument onto the instance under the same name.
        params = dict(locals())
        del params['self']
        for attr, value in params.items():
            setattr(self, attr, value)
    def __str__(self):
        # Render "name=value " pairs in declaration order; the trailing space
        # is kept for byte-compatibility with the generated formatting.
        return "".join(
            "{}={} ".format(attr, getattr(self, attr))
            for attr in (
                'association_type', 'subject', 'negated', 'relation',
                'object', 'qualifiers', 'publications', 'provided_by',
                'id', 'label', 'subject_extensions', 'object_extensions',
                'has_evidence_graph', 'has_evidence_type', 'has_evidence',
                'subject_taxon', 'subject_taxon_label',
                'subject_taxon_closure', 'subject_taxon_closure_label',
                'object_taxon', 'object_taxon_label',
                'object_taxon_closure', 'object_taxon_closure_label'))
    def __repr__(self):
        return str(self)
class GeneToExpressionSiteAssociation(Association):
    """
    An association between a gene and an expression site, possibly qualified
    by stage/timing info (``stage_qualifier``, ``quantifier_qualifier``).

    Every constructor argument is optional and is stored verbatim as a
    same-named instance attribute.
    """
    # NOTE(review): parameters `object` and `id` shadow builtins, but they are
    # part of the generated public interface and must be kept.
    def __init__(self,
                 stage_qualifier=None, quantifier_qualifier=None,
                 association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Copy every argument onto the instance under the same name.
        params = dict(locals())
        del params['self']
        for attr, value in params.items():
            setattr(self, attr, value)
    def __str__(self):
        # Render "name=value " pairs in declaration order; the trailing space
        # is kept for byte-compatibility with the generated formatting.
        return "".join(
            "{}={} ".format(attr, getattr(self, attr))
            for attr in (
                'stage_qualifier', 'quantifier_qualifier',
                'association_type', 'subject', 'negated', 'relation',
                'object', 'qualifiers', 'publications', 'provided_by',
                'id', 'label', 'subject_extensions', 'object_extensions',
                'has_evidence_graph', 'has_evidence_type', 'has_evidence',
                'subject_taxon', 'subject_taxon_label',
                'subject_taxon_closure', 'subject_taxon_closure_label',
                'object_taxon', 'object_taxon_label',
                'object_taxon_closure', 'object_taxon_closure_label'))
    def __repr__(self):
        return str(self)
class SequenceVariantModulatesTreatmentAssociation(Association):
    """
    Generated value-holder association between a sequence variant and a
    treatment it modulates.

    Every constructor argument is optional and is stored verbatim as a
    same-named instance attribute.
    """
    # NOTE(review): parameters `object` and `id` shadow builtins, but they are
    # part of the generated public interface and must be kept.
    def __init__(self,
                 association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Copy every argument onto the instance under the same name.
        params = dict(locals())
        del params['self']
        for attr, value in params.items():
            setattr(self, attr, value)
    def __str__(self):
        # Render "name=value " pairs in declaration order; the trailing space
        # is kept for byte-compatibility with the generated formatting.
        return "".join(
            "{}={} ".format(attr, getattr(self, attr))
            for attr in (
                'association_type', 'subject', 'negated', 'relation',
                'object', 'qualifiers', 'publications', 'provided_by',
                'id', 'label', 'subject_extensions', 'object_extensions',
                'has_evidence_graph', 'has_evidence_type', 'has_evidence',
                'subject_taxon', 'subject_taxon_label',
                'subject_taxon_closure', 'subject_taxon_closure_label',
                'object_taxon', 'object_taxon_label',
                'object_taxon_closure', 'object_taxon_closure_label'))
    def __repr__(self):
        return str(self)
class GeneToGoTermAssociation(Association):
    """
    Generated value-holder association between a gene and a GO term.

    Every constructor argument is optional and is stored verbatim as a
    same-named instance attribute.
    """
    # NOTE(review): parameters `object` and `id` shadow builtins, but they are
    # part of the generated public interface and must be kept.
    def __init__(self,
                 association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Copy every argument onto the instance under the same name.
        params = dict(locals())
        del params['self']
        for attr, value in params.items():
            setattr(self, attr, value)
    def __str__(self):
        # Render "name=value " pairs in declaration order; the trailing space
        # is kept for byte-compatibility with the generated formatting.
        return "".join(
            "{}={} ".format(attr, getattr(self, attr))
            for attr in (
                'association_type', 'subject', 'negated', 'relation',
                'object', 'qualifiers', 'publications', 'provided_by',
                'id', 'label', 'subject_extensions', 'object_extensions',
                'has_evidence_graph', 'has_evidence_type', 'has_evidence',
                'subject_taxon', 'subject_taxon_label',
                'subject_taxon_closure', 'subject_taxon_closure_label',
                'object_taxon', 'object_taxon_label',
                'object_taxon_closure', 'object_taxon_closure_label'))
    def __repr__(self):
        return str(self)
class AssociationResultSet(InformationContentEntity):
    """
    Generated value-holder for a collection of associations with an optional
    id and label.

    Every constructor argument is optional and is stored verbatim as a
    same-named instance attribute.
    """
    def __init__(self, associations=None, id=None, label=None):
        self.associations = associations
        # NOTE(review): `id` shadows the builtin but is part of the generated
        # public interface and must be kept.
        self.id = id
        self.label = label
    def __str__(self):
        # Render "name=value " pairs in declaration order; the trailing space
        # is kept for byte-compatibility with the generated formatting.
        return "".join(
            "{}={} ".format(attr, getattr(self, attr))
            for attr in ('associations', 'id', 'label'))
    def __repr__(self):
        return str(self)
class GenomicSequenceLocalization(Association):
    """
    A relationship between a sequence feature and an entity it is localized
    to. The reference entity may be a chromosome, chromosome region or
    information entity such as a contig.

    Adds interbase start/end coordinates, genome build and phase on top of
    the common association slots; every constructor argument is optional and
    is stored verbatim as a same-named instance attribute.
    """
    # NOTE(review): parameters `object` and `id` shadow builtins, but they are
    # part of the generated public interface and must be kept.
    def __init__(self,
                 start_interbase_coordinate=None,
                 end_interbase_coordinate=None, genome_build=None,
                 phase=None, association_type=None, subject=None,
                 negated=None, relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Copy every argument onto the instance under the same name.
        params = dict(locals())
        del params['self']
        for attr, value in params.items():
            setattr(self, attr, value)
    def __str__(self):
        # Render "name=value " pairs in declaration order; the trailing space
        # is kept for byte-compatibility with the generated formatting.
        return "".join(
            "{}={} ".format(attr, getattr(self, attr))
            for attr in (
                'start_interbase_coordinate', 'end_interbase_coordinate',
                'genome_build', 'phase',
                'association_type', 'subject', 'negated', 'relation',
                'object', 'qualifiers', 'publications', 'provided_by',
                'id', 'label', 'subject_extensions', 'object_extensions',
                'has_evidence_graph', 'has_evidence_type', 'has_evidence',
                'subject_taxon', 'subject_taxon_label',
                'subject_taxon_closure', 'subject_taxon_closure_label',
                'object_taxon', 'object_taxon_label',
                'object_taxon_closure', 'object_taxon_closure_label'))
    def __repr__(self):
        return str(self)
class TranscriptToGeneRelationship(SequenceFeatureRelationship):
    """
    A gene is a collection of transcripts.

    Generated value-holder relationship; every constructor argument is
    optional and is stored verbatim as a same-named instance attribute.
    """
    # NOTE(review): parameters `object` and `id` shadow builtins, but they are
    # part of the generated public interface and must be kept.
    def __init__(self,
                 association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Copy every argument onto the instance under the same name.
        params = dict(locals())
        del params['self']
        for attr, value in params.items():
            setattr(self, attr, value)
    def __str__(self):
        # Render "name=value " pairs in declaration order; the trailing space
        # is kept for byte-compatibility with the generated formatting.
        return "".join(
            "{}={} ".format(attr, getattr(self, attr))
            for attr in (
                'association_type', 'subject', 'negated', 'relation',
                'object', 'qualifiers', 'publications', 'provided_by',
                'id', 'label', 'subject_extensions', 'object_extensions',
                'has_evidence_graph', 'has_evidence_type', 'has_evidence',
                'subject_taxon', 'subject_taxon_label',
                'subject_taxon_closure', 'subject_taxon_closure_label',
                'object_taxon', 'object_taxon_label',
                'object_taxon_closure', 'object_taxon_closure_label'))
    def __repr__(self):
        return str(self)
class GeneToGeneProductRelationship(SequenceFeatureRelationship):
    """
    A gene is transcribed and potentially translated to a gene product.

    Generated value-holder relationship; every constructor argument is
    optional and is stored verbatim as a same-named instance attribute.
    """
    # NOTE(review): parameters `object` and `id` shadow builtins, but they are
    # part of the generated public interface and must be kept.
    def __init__(self,
                 association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Copy every argument onto the instance under the same name.
        params = dict(locals())
        del params['self']
        for attr, value in params.items():
            setattr(self, attr, value)
    def __str__(self):
        # Render "name=value " pairs in declaration order; the trailing space
        # is kept for byte-compatibility with the generated formatting.
        return "".join(
            "{}={} ".format(attr, getattr(self, attr))
            for attr in (
                'association_type', 'subject', 'negated', 'relation',
                'object', 'qualifiers', 'publications', 'provided_by',
                'id', 'label', 'subject_extensions', 'object_extensions',
                'has_evidence_graph', 'has_evidence_type', 'has_evidence',
                'subject_taxon', 'subject_taxon_label',
                'subject_taxon_closure', 'subject_taxon_closure_label',
                'object_taxon', 'object_taxon_label',
                'object_taxon_closure', 'object_taxon_closure_label'))
    def __repr__(self):
        return str(self)
class ExonToTranscriptRelationship(SequenceFeatureRelationship):
    """
    A transcript is formed from multiple exons.

    Generated value-holder relationship; every constructor argument is
    optional and is stored verbatim as a same-named instance attribute.
    """
    # NOTE(review): parameters `object` and `id` shadow builtins, but they are
    # part of the generated public interface and must be kept.
    def __init__(self,
                 association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Copy every argument onto the instance under the same name.
        params = dict(locals())
        del params['self']
        for attr, value in params.items():
            setattr(self, attr, value)
    def __str__(self):
        # Render "name=value " pairs in declaration order; the trailing space
        # is kept for byte-compatibility with the generated formatting.
        return "".join(
            "{}={} ".format(attr, getattr(self, attr))
            for attr in (
                'association_type', 'subject', 'negated', 'relation',
                'object', 'qualifiers', 'publications', 'provided_by',
                'id', 'label', 'subject_extensions', 'object_extensions',
                'has_evidence_graph', 'has_evidence_type', 'has_evidence',
                'subject_taxon', 'subject_taxon_label',
                'subject_taxon_closure', 'subject_taxon_closure_label',
                'object_taxon', 'object_taxon_label',
                'object_taxon_closure', 'object_taxon_closure_label'))
    def __repr__(self):
        return str(self)
class SequenceFeatureToSequenceRelationship(Association):
    """
    Relates a sequence feature such as a gene to its sequence.

    Generated value-holder relationship; every constructor argument is
    optional and is stored verbatim as a same-named instance attribute.
    """
    # NOTE(review): parameters `object` and `id` shadow builtins, but they are
    # part of the generated public interface and must be kept.
    def __init__(self,
                 association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Copy every argument onto the instance under the same name.
        params = dict(locals())
        del params['self']
        for attr, value in params.items():
            setattr(self, attr, value)
    def __str__(self):
        # Render "name=value " pairs in declaration order; the trailing space
        # is kept for byte-compatibility with the generated formatting.
        return "".join(
            "{}={} ".format(attr, getattr(self, attr))
            for attr in (
                'association_type', 'subject', 'negated', 'relation',
                'object', 'qualifiers', 'publications', 'provided_by',
                'id', 'label', 'subject_extensions', 'object_extensions',
                'has_evidence_graph', 'has_evidence_type', 'has_evidence',
                'subject_taxon', 'subject_taxon_label',
                'subject_taxon_closure', 'subject_taxon_closure_label',
                'object_taxon', 'object_taxon_label',
                'object_taxon_closure', 'object_taxon_closure_label'))
    def __repr__(self):
        return str(self)
class GeneRegulatoryRelationship(Association):
    """
    A regulatory relationship between two genes
    """

    # All slot names in declaration order; drives __init__ and __str__.
    _SLOTS = ("association_type", "subject", "negated", "relation", "object",
              "qualifiers", "publications", "provided_by", "id", "label",
              "subject_extensions", "object_extensions", "has_evidence_graph",
              "has_evidence_type", "has_evidence", "subject_taxon",
              "subject_taxon_label", "subject_taxon_closure",
              "subject_taxon_closure_label", "object_taxon",
              "object_taxon_label", "object_taxon_closure",
              "object_taxon_closure_label")

    def __init__(self,
                 association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Store every constructor argument verbatim under the same name.
        supplied = locals()
        for slot in self._SLOTS:
            setattr(self, slot, supplied[slot])

    def __str__(self):
        # One "name=value " fragment per slot, in declaration order,
        # including the trailing space the generated code emitted.
        return "".join("{}={} ".format(slot, getattr(self, slot))
                       for slot in self._SLOTS)

    def __repr__(self):
        return self.__str__()
class AnatomicalEntityPartOfAnatomicalEntityAssociation(AnatomicalEntityToAnatomicalEntityAssociation):
    """
    Generated association class; the source schema provided no description.
    """

    # All slot names in declaration order; drives __init__ and __str__.
    _SLOTS = ("association_type", "subject", "negated", "relation", "object",
              "qualifiers", "publications", "provided_by", "id", "label",
              "subject_extensions", "object_extensions", "has_evidence_graph",
              "has_evidence_type", "has_evidence", "subject_taxon",
              "subject_taxon_label", "subject_taxon_closure",
              "subject_taxon_closure_label", "object_taxon",
              "object_taxon_label", "object_taxon_closure",
              "object_taxon_closure_label")

    def __init__(self,
                 association_type=None, subject=None, negated=None,
                 relation=None, object=None, qualifiers=None,
                 publications=None, provided_by=None, id=None, label=None,
                 subject_extensions=None, object_extensions=None,
                 has_evidence_graph=None, has_evidence_type=None,
                 has_evidence=None, subject_taxon=None,
                 subject_taxon_label=None, subject_taxon_closure=None,
                 subject_taxon_closure_label=None, object_taxon=None,
                 object_taxon_label=None, object_taxon_closure=None,
                 object_taxon_closure_label=None):
        # Store every constructor argument verbatim under the same name.
        supplied = locals()
        for slot in self._SLOTS:
            setattr(self, slot, supplied[slot])

    def __str__(self):
        # One "name=value " fragment per slot, in declaration order,
        # including the trailing space the generated code emitted.
        return "".join("{}={} ".format(slot, getattr(self, slot))
                       for slot in self._SLOTS)

    def __repr__(self):
        return self.__str__()
class MolecularActivity(Occurrent):
    """
    An execution of a molecular function
    """

    def __init__(self):
        # This generated class declares no slots of its own;
        # there is nothing to initialize.
        pass

    def __str__(self):
        # The generator emitted the no-op `"".format()`; a plain empty
        # string is equivalent (no slots means no fields to render).
        return ""

    def __repr__(self):
        return self.__str__()
class Pathway(BiologicalProcess):
    """
    Generated class; the source schema provided no description.
    """

    # Slot names in declaration order; drives __init__ and __str__.
    _SLOTS = ("id", "label")

    def __init__(self, id=None, label=None):
        # Store both constructor arguments verbatim under the same names.
        for slot, value in zip(self._SLOTS, (id, label)):
            setattr(self, slot, value)

    def __str__(self):
        # "name=value " per slot, with the trailing space the generated
        # format produced.
        return "".join("{}={} ".format(s, getattr(self, s))
                       for s in self._SLOTS)

    def __repr__(self):
        return self.__str__()
class CellularComponent(AnatomicalEntity):
    """
    A location in or around a cell
    """

    # Slot names in declaration order; drives __init__ and __str__.
    _SLOTS = ("id", "label", "in_taxon")

    def __init__(self, id=None, label=None, in_taxon=None):
        # Store all constructor arguments verbatim under the same names.
        for slot, value in zip(self._SLOTS, (id, label, in_taxon)):
            setattr(self, slot, value)

    def __str__(self):
        # "name=value " per slot, with the trailing space the generated
        # format produced.
        return "".join("{}={} ".format(s, getattr(self, s))
                       for s in self._SLOTS)

    def __repr__(self):
        return self.__str__()
class Cell(AnatomicalEntity):
    """
    Generated class; the source schema provided no description.
    """

    # Slot names in declaration order; drives __init__ and __str__.
    _SLOTS = ("id", "label", "in_taxon")

    def __init__(self, id=None, label=None, in_taxon=None):
        # Store all constructor arguments verbatim under the same names.
        for slot, value in zip(self._SLOTS, (id, label, in_taxon)):
            setattr(self, slot, value)

    def __str__(self):
        # "name=value " per slot, with the trailing space the generated
        # format produced.
        return "".join("{}={} ".format(s, getattr(self, s))
                       for s in self._SLOTS)

    def __repr__(self):
        return self.__str__()
class GrossAnatomicalStructure(AnatomicalEntity):
    """
    Generated class; the source schema provided no description.
    """

    # Slot names in declaration order; drives __init__ and __str__.
    _SLOTS = ("id", "label", "in_taxon")

    def __init__(self, id=None, label=None, in_taxon=None):
        # Store all constructor arguments verbatim under the same names.
        for slot, value in zip(self._SLOTS, (id, label, in_taxon)):
            setattr(self, slot, value)

    def __str__(self):
        # "name=value " per slot, with the trailing space the generated
        # format produced.
        return "".join("{}={} ".format(s, getattr(self, s))
                       for s in self._SLOTS)

    def __repr__(self):
        return self.__str__()
| 43.246336
| 1,073
| 0.666908
| 36,056
| 321,623
| 5.53342
| 0.013313
| 0.101106
| 0.08009
| 0.050523
| 0.997539
| 0.997313
| 0.997313
| 0.997313
| 0.997313
| 0.997313
| 0
| 0
| 0.249858
| 321,623
| 7,436
| 1,074
| 43.252152
| 0.826952
| 0.045796
| 0
| 0.998342
| 0
| 0.013765
| 0.128249
| 0.056237
| 0
| 0
| 0
| 0
| 0
| 1
| 0.11592
| false
| 0.00597
| 0
| 0.07728
| 0.231841
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b77800c335f950de32bb48ae22f09868250ab502
| 9,660
|
py
|
Python
|
src/genie/libs/parser/iosxr/tests/ShowSpanningTreePvrst/cli/equal/golden_output_1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 204
|
2018-06-27T00:55:27.000Z
|
2022-03-06T21:12:18.000Z
|
src/genie/libs/parser/iosxr/tests/ShowSpanningTreePvrst/cli/equal/golden_output_1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 468
|
2018-06-19T00:33:18.000Z
|
2022-03-31T23:23:35.000Z
|
src/genie/libs/parser/iosxr/tests/ShowSpanningTreePvrst/cli/equal/golden_output_1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 309
|
2019-01-16T20:21:07.000Z
|
2022-03-30T12:56:41.000Z
|
def _vlan_entry(vlan_id):
    """Build the expected parse result for one PVST vlan.

    In this golden output vlans 2, 3 and 4 are identical apart from
    their vlan id, so the entry is generated once instead of being
    repeated three times.
    """
    interfaces = {}
    # (port_num, interface suffix, role, state, designated bridge address).
    # Ports 1-2 are designated on the local bridge; ports 3-4 point at the
    # designated root bridge (root port and blocked alternate).
    for port_num, suffix, role, state, dsg_addr in (
            (1, '0', 'DSGN', 'FWD', '8cb6.4fff.6588'),
            (2, '1', 'DSGN', 'FWD', '8cb6.4fff.6588'),
            (3, '10', 'ROOT', 'FWD', '0021.1bff.d973'),
            (4, '11', 'ALT', 'BLK', '0021.1bff.d973')):
        name = 'GigabitEthernet0/7/0/' + suffix
        interfaces[name] = {
            'name': name,
            'cost': 20000,
            'role': role,
            'port_priority': 128,
            'port_num': port_num,
            'port_state': state,
            'designated_bridge_priority': 32768,
            'designated_bridge_address': dsg_addr,
            'designated_port_priority': 128,
            'designated_port_num': port_num,
        }
    return {
        'vlan_id': vlan_id,
        'designated_root_priority': 32768,
        'designated_root_address': '0021.1bff.d973',
        'designated_root_max_age': 20,
        'designated_root_forward_delay': 15,
        'bridge_priority': 32768,
        'sys_id_ext': 0,
        'bridge_address': '8cb6.4fff.6588',
        'bridge_max_age': 20,
        'bridge_forward_delay': 15,
        'bridge_transmit_hold_count': 6,
        'interface': interfaces,
    }


# Golden expected structure for ShowSpanningTreePvrst (pvst instance 'a').
expected_output = {
    'pvst': {
        'a': {
            'pvst_id': 'a',
            'vlans': {vlan: _vlan_entry(vlan) for vlan in (2, 3, 4)},
        },
    },
}
| 48.059701
| 74
| 0.348447
| 638
| 9,660
| 4.962382
| 0.083072
| 0.128869
| 0.13645
| 0.083386
| 0.982628
| 0.982628
| 0.982628
| 0.982628
| 0.982628
| 0.982628
| 0
| 0.121443
| 0.552484
| 9,660
| 200
| 75
| 48.3
| 0.610918
| 0
| 0
| 0.833333
| 0
| 0
| 0.32895
| 0.187927
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b79cb9a2281743164268a69caa2d24f7394c4d96
| 22,291
|
py
|
Python
|
tests/milvus_python_test/collection/test_collection_count.py
|
mabergerx/milvus
|
1c48f7f4e47252f8aa2b3c09c9289a9444f5887c
|
[
"Apache-2.0"
] | null | null | null |
tests/milvus_python_test/collection/test_collection_count.py
|
mabergerx/milvus
|
1c48f7f4e47252f8aa2b3c09c9289a9444f5887c
|
[
"Apache-2.0"
] | 2
|
2020-08-20T07:17:50.000Z
|
2020-08-21T04:21:34.000Z
|
tests/milvus_python_test/collection/test_collection_count.py
|
mabergerx/milvus
|
1c48f7f4e47252f8aa2b3c09c9289a9444f5887c
|
[
"Apache-2.0"
] | 1
|
2021-04-19T08:33:19.000Z
|
2021-04-19T08:33:19.000Z
|
import pdb
import copy
import logging
import itertools
from time import sleep
import threading
from multiprocessing import Process
import sklearn.preprocessing
import pytest
from utils import *
from constants import *
# Prefix for unique collection names generated in this module, and the
# partition tag shared by all partition tests below.
uid = "collection_count"
tag = "collection_count_tag"
class TestCollectionCount:
    """
    params means different nb, the nb value may trigger merge, or not
    """
    @pytest.fixture(
        scope="function",
        params=[
            1,
            1000,
            2001
        ],
    )
    def insert_count(self, request):
        # Parametrized number of entities to insert per test run.
        yield request.param
    """
    generate valid create_index params
    """
    @pytest.fixture(
        scope="function",
        params=gen_simple_index()
    )
    def get_simple_index(self, request, connect):
        # Skip index types the CPU-only server build does not support,
        # then force the L2 metric for this test class.
        if str(connect._cmd("mode")[1]) == "CPU":
            if request.param["index_type"] in index_cpu_not_support():
                pytest.skip("sq8h not support in cpu mode")
        request.param.update({"metric_type": "L2"})
        return request.param
    def test_collection_count(self, connect, collection, insert_count):
        '''
        target: test collection rows_count is correct or not
        method: create collection and add vectors in it,
            assert the value returned by count_entities method is equal to length of vectors
        expected: the count is equal to the length of vectors
        '''
        entities = gen_entities(insert_count)
        res = connect.bulk_insert(collection, entities)
        # flush so the inserted entities become visible to count_entities
        connect.flush([collection])
        res = connect.count_entities(collection)
        assert res == insert_count
    def test_collection_count_partition(self, connect, collection, insert_count):
        '''
        target: test collection rows_count is correct or not
        method: create collection, create partition and add vectors in it,
            assert the value returned by count_entities method is equal to length of vectors
        expected: the count is equal to the length of vectors
        '''
        entities = gen_entities(insert_count)
        connect.create_partition(collection, tag)
        res_ids = connect.bulk_insert(collection, entities, partition_tag=tag)
        connect.flush([collection])
        res = connect.count_entities(collection)
        assert res == insert_count
    def test_collection_count_multi_partitions_A(self, connect, collection, insert_count):
        '''
        target: test collection rows_count is correct or not
        method: create collection, create partitions and add entities in it
            (insert goes to the default partition, not to tag/new_tag),
            assert the value returned by count_entities method is equal to length of entities
        expected: the count is equal to the length of entities
        '''
        new_tag = "new_tag"
        entities = gen_entities(insert_count)
        connect.create_partition(collection, tag)
        connect.create_partition(collection, new_tag)
        res_ids = connect.bulk_insert(collection, entities)
        connect.flush([collection])
        res = connect.count_entities(collection)
        assert res == insert_count
    def test_collection_count_multi_partitions_B(self, connect, collection, insert_count):
        '''
        target: test collection rows_count is correct or not
        method: create collection, create partitions and add entities in one of the partitions,
            assert the value returned by count_entities method is equal to length of entities
        expected: the count is equal to the length of entities
        '''
        new_tag = "new_tag"
        entities = gen_entities(insert_count)
        connect.create_partition(collection, tag)
        connect.create_partition(collection, new_tag)
        res_ids = connect.bulk_insert(collection, entities, partition_tag=tag)
        connect.flush([collection])
        res = connect.count_entities(collection)
        assert res == insert_count
    def test_collection_count_multi_partitions_C(self, connect, collection, insert_count):
        '''
        target: test collection rows_count is correct or not
        method: create collection, create partitions, add entities to the default
            partition and again to one named partition,
            assert the value returned by count_entities method is equal to length of entities
        expected: the count is equal to twice the length of the entities batch
        '''
        new_tag = "new_tag"
        entities = gen_entities(insert_count)
        connect.create_partition(collection, tag)
        connect.create_partition(collection, new_tag)
        # same batch inserted twice: once default, once into `tag`
        res_ids = connect.bulk_insert(collection, entities)
        res_ids_2 = connect.bulk_insert(collection, entities, partition_tag=tag)
        connect.flush([collection])
        res = connect.count_entities(collection)
        assert res == insert_count * 2
    def test_collection_count_multi_partitions_D(self, connect, collection, insert_count):
        '''
        target: test collection rows_count is correct or not
        method: create collection, create partitions and add the same entities
            batch into each of the two named partitions,
            assert the value returned by count_entities method is equal to length of entities
        expected: the collection count is equal to twice the length of the entities batch
        '''
        new_tag = "new_tag"
        entities = gen_entities(insert_count)
        connect.create_partition(collection, tag)
        connect.create_partition(collection, new_tag)
        res_ids = connect.bulk_insert(collection, entities, partition_tag=tag)
        res_ids2 = connect.bulk_insert(collection, entities, partition_tag=new_tag)
        connect.flush([collection])
        res = connect.count_entities(collection)
        assert res == insert_count * 2
    # NOTE(review): leading underscore disables this test for pytest collection.
    def _test_collection_count_after_index_created(self, connect, collection, get_simple_index, insert_count):
        '''
        target: test count_entities after an index has been created
        method: add vectors in db, create index, then call count_entities
        expected: the count is equal to insert_count (index creation must
            not change the entity count)
        '''
        entities = gen_entities(insert_count)
        res = connect.bulk_insert(collection, entities)
        connect.flush([collection])
        connect.create_index(collection, default_float_vec_field_name, get_simple_index)
        res = connect.count_entities(collection)
        assert res == insert_count
    def test_count_without_connection(self, collection, dis_connect):
        '''
        target: test count_entities, without connection
        method: calling count_entities with correct params, with a disconnected instance
        expected: count_entities raise exception
        '''
        with pytest.raises(Exception) as e:
            dis_connect.count_entities(collection)
    def test_collection_count_no_vectors(self, connect, collection):
        '''
        target: test collection rows_count is correct or not, if collection is empty
        method: create collection and no vectors in it,
            assert the value returned by count_entities method is equal to 0
        expected: the count is equal to 0
        '''
        res = connect.count_entities(collection)
        assert res == 0
class TestCollectionCountIP:
    """
    params means different nb, the nb value may trigger merge, or not
    """
    @pytest.fixture(
        scope="function",
        params=[
            1,
            1000,
            2001
        ],
    )
    def insert_count(self, request):
        # Parametrized number of entities to insert per test run.
        yield request.param
    """
    generate valid create_index params
    """
    @pytest.fixture(
        scope="function",
        params=gen_simple_index()
    )
    def get_simple_index(self, request, connect):
        # Same as TestCollectionCount.get_simple_index but forces the
        # IP (inner product) metric instead of L2.
        if str(connect._cmd("mode")[1]) == "CPU":
            if request.param["index_type"] in index_cpu_not_support():
                pytest.skip("sq8h not support in cpu mode")
        request.param.update({"metric_type": "IP"})
        return request.param
    # NOTE(review): leading underscore disables this test for pytest collection.
    def _test_collection_count_after_index_created(self, connect, collection, get_simple_index, insert_count):
        '''
        target: test count_entities after an index has been created
        method: add vectors in db, create index, then call count_entities
        expected: the count is equal to insert_count (index creation must
            not change the entity count)
        '''
        entities = gen_entities(insert_count)
        res = connect.bulk_insert(collection, entities)
        connect.flush([collection])
        connect.create_index(collection, field_name, get_simple_index)
        res = connect.count_entities(collection)
        assert res == insert_count
class TestCollectionCountBinary:
    """
    Count tests against binary collections.

    params means different nb, the nb value may trigger merge, or not
    """
    @pytest.fixture(
        scope="function",
        params=[
            1,
            1000,
            2001
        ],
    )
    def insert_count(self, request):
        # Parametrized number of entities to insert per test run.
        yield request.param
    @pytest.fixture(
        scope="function",
        params=gen_simple_index()
    )
    def get_jaccard_index(self, request, connect):
        # Only index types with binary support can use the JACCARD metric.
        if request.param["index_type"] in binary_support():
            request.param["metric_type"] = "JACCARD"
            return request.param
        else:
            pytest.skip("Skip index")
    @pytest.fixture(
        scope="function",
        params=gen_simple_index()
    )
    def get_hamming_index(self, request, connect):
        # Only index types with binary support can use the HAMMING metric.
        if request.param["index_type"] in binary_support():
            request.param["metric_type"] = "HAMMING"
            return request.param
        else:
            pytest.skip("Skip index")
    @pytest.fixture(
        scope="function",
        params=gen_simple_index()
    )
    def get_substructure_index(self, request, connect):
        # SUBSTRUCTURE is only defined for FLAT.
        if request.param["index_type"] == "FLAT":
            request.param["metric_type"] = "SUBSTRUCTURE"
            return request.param
        else:
            pytest.skip("Skip index")
    @pytest.fixture(
        scope="function",
        params=gen_simple_index()
    )
    def get_superstructure_index(self, request, connect):
        # SUPERSTRUCTURE is only defined for FLAT.
        if request.param["index_type"] == "FLAT":
            request.param["metric_type"] = "SUPERSTRUCTURE"
            return request.param
        else:
            pytest.skip("Skip index")
    def test_collection_count(self, connect, binary_collection, insert_count):
        '''
        target: test collection rows_count is correct or not
        method: create collection and add entities in it,
            assert the value returned by count_entities method is equal to length of entities
        expected: the count is equal to the length of entities
        '''
        raw_vectors, entities = gen_binary_entities(insert_count)
        res = connect.bulk_insert(binary_collection, entities)
        logging.getLogger().info(len(res))
        connect.flush([binary_collection])
        res = connect.count_entities(binary_collection)
        assert res == insert_count
    def test_collection_count_partition(self, connect, binary_collection, insert_count):
        '''
        target: test collection rows_count is correct or not
        method: create collection, create partition and add entities in it,
            assert the value returned by count_entities method is equal to length of entities
        expected: the count is equal to the length of entities
        '''
        raw_vectors, entities = gen_binary_entities(insert_count)
        connect.create_partition(binary_collection, tag)
        res_ids = connect.bulk_insert(binary_collection, entities, partition_tag=tag)
        connect.flush([binary_collection])
        res = connect.count_entities(binary_collection)
        assert res == insert_count
    @pytest.mark.level(2)
    def test_collection_count_multi_partitions_A(self, connect, binary_collection, insert_count):
        '''
        target: test collection rows_count is correct or not
        method: create collection, create partitions and add entities in it
            (insert goes to the default partition),
            assert the value returned by count_entities method is equal to length of entities
        expected: the count is equal to the length of entities
        '''
        new_tag = "new_tag"
        raw_vectors, entities = gen_binary_entities(insert_count)
        connect.create_partition(binary_collection, tag)
        connect.create_partition(binary_collection, new_tag)
        res_ids = connect.bulk_insert(binary_collection, entities)
        connect.flush([binary_collection])
        res = connect.count_entities(binary_collection)
        assert res == insert_count
    @pytest.mark.level(2)
    def test_collection_count_multi_partitions_B(self, connect, binary_collection, insert_count):
        '''
        target: test collection rows_count is correct or not
        method: create collection, create partitions and add entities in one of the partitions,
            assert the value returned by count_entities method is equal to length of entities
        expected: the count is equal to the length of entities
        '''
        new_tag = "new_tag"
        raw_vectors, entities = gen_binary_entities(insert_count)
        connect.create_partition(binary_collection, tag)
        connect.create_partition(binary_collection, new_tag)
        res_ids = connect.bulk_insert(binary_collection, entities, partition_tag=tag)
        connect.flush([binary_collection])
        res = connect.count_entities(binary_collection)
        assert res == insert_count
    def test_collection_count_multi_partitions_C(self, connect, binary_collection, insert_count):
        '''
        target: test collection rows_count is correct or not
        method: create collection, create partitions, add entities to the default
            partition and again to one named partition,
            assert the value returned by count_entities method is equal to length of entities
        expected: the count is equal to twice the length of the entities batch
        '''
        new_tag = "new_tag"
        raw_vectors, entities = gen_binary_entities(insert_count)
        connect.create_partition(binary_collection, tag)
        connect.create_partition(binary_collection, new_tag)
        res_ids = connect.bulk_insert(binary_collection, entities)
        res_ids_2 = connect.bulk_insert(binary_collection, entities, partition_tag=tag)
        connect.flush([binary_collection])
        res = connect.count_entities(binary_collection)
        assert res == insert_count * 2
    @pytest.mark.level(2)
    def test_collection_count_multi_partitions_D(self, connect, binary_collection, insert_count):
        '''
        target: test collection rows_count is correct or not
        method: create collection, create partitions and add the same entities
            batch into each of the two named partitions,
            assert the value returned by count_entities method is equal to length of entities
        expected: the collection count is equal to twice the length of the entities batch
        '''
        new_tag = "new_tag"
        raw_vectors, entities = gen_binary_entities(insert_count)
        connect.create_partition(binary_collection, tag)
        connect.create_partition(binary_collection, new_tag)
        res_ids = connect.bulk_insert(binary_collection, entities, partition_tag=tag)
        res_ids2 = connect.bulk_insert(binary_collection, entities, partition_tag=new_tag)
        connect.flush([binary_collection])
        res = connect.count_entities(binary_collection)
        assert res == insert_count * 2
    # FIX: both disabled index-count tests were named
    # `_test_collection_count_after_index_created`, so the JACCARD variant
    # was silently shadowed by the HAMMING one (only the last `def` with a
    # given name is bound on a class). Renamed to distinct names; the
    # leading underscore still keeps them out of pytest collection.
    def _test_collection_count_after_jaccard_index_created(self, connect, binary_collection, get_jaccard_index, insert_count):
        '''
        target: test count_entities after a JACCARD index has been created
        method: add vectors in db, create index, then call count_entities
        expected: the count is equal to insert_count (index creation must
            not change the entity count)
        '''
        raw_vectors, entities = gen_binary_entities(insert_count)
        res = connect.bulk_insert(binary_collection, entities)
        connect.flush([binary_collection])
        connect.create_index(binary_collection, field_name, get_jaccard_index)
        res = connect.count_entities(binary_collection)
        assert res == insert_count
    def _test_collection_count_after_hamming_index_created(self, connect, binary_collection, get_hamming_index, insert_count):
        '''
        target: test count_entities after a HAMMING index has been created
        method: add vectors in db, create index, then call count_entities
        expected: the count is equal to insert_count (index creation must
            not change the entity count)
        '''
        raw_vectors, entities = gen_binary_entities(insert_count)
        res = connect.bulk_insert(binary_collection, entities)
        connect.flush([binary_collection])
        connect.create_index(binary_collection, field_name, get_hamming_index)
        res = connect.count_entities(binary_collection)
        assert res == insert_count
    def test_collection_count_no_entities(self, connect, binary_collection):
        '''
        target: test collection rows_count is correct or not, if collection is empty
        method: create collection and no vectors in it,
            assert the value returned by count_entities method is equal to 0
        expected: the count is equal to 0
        '''
        res = connect.count_entities(binary_collection)
        assert res == 0
class TestCollectionMultiCollections:
    """
    params means different nb, the nb value may trigger merge, or not
    """
    @pytest.fixture(
        scope="function",
        params=[
            1,
            1000,
            2001
        ],
    )
    def insert_count(self, request):
        # Parametrized number of entities to insert per test run.
        yield request.param
    # NOTE(review): none of the four index fixtures below is used by any
    # test in this class; they look copied from TestCollectionCountBinary.
    @pytest.fixture(
        scope="function",
        params=gen_simple_index()
    )
    def get_jaccard_index(self, request, connect):
        if request.param["index_type"] in binary_support():
            request.param["metric_type"] = "JACCARD"
            return request.param
        else:
            pytest.skip("Skip index")
    @pytest.fixture(
        scope="function",
        params=gen_simple_index()
    )
    def get_hamming_index(self, request, connect):
        if request.param["index_type"] in binary_support():
            request.param["metric_type"] = "HAMMING"
            return request.param
        else:
            pytest.skip("Skip index")
    @pytest.fixture(
        scope="function",
        params=gen_simple_index()
    )
    def get_substructure_index(self, request, connect):
        if request.param["index_type"] == "FLAT":
            request.param["metric_type"] = "SUBSTRUCTURE"
            return request.param
        else:
            pytest.skip("Skip index")
    @pytest.fixture(
        scope="function",
        params=gen_simple_index()
    )
    def get_superstructure_index(self, request, connect):
        if request.param["index_type"] == "FLAT":
            request.param["metric_type"] = "SUPERSTRUCTURE"
            return request.param
        else:
            pytest.skip("Skip index")
    def test_collection_count_multi_collections_l2(self, connect, insert_count):
        '''
        target: test collection rows_count is correct or not with multiple collections of L2
        method: create 20 collections, add the same entities batch to each,
            assert the value returned by count_entities method is equal to length of entities
        expected: the count is equal to the length of entities
        '''
        entities = gen_entities(insert_count)
        collection_list = []
        collection_num = 20
        for i in range(collection_num):
            collection_name = gen_unique_str(uid)
            collection_list.append(collection_name)
            connect.create_collection(collection_name, default_fields)
            res = connect.bulk_insert(collection_name, entities)
        # one flush for all collections, then verify each count
        connect.flush(collection_list)
        for i in range(collection_num):
            res = connect.count_entities(collection_list[i])
            assert res == insert_count
    @pytest.mark.level(2)
    def test_collection_count_multi_collections_binary(self, connect, binary_collection, insert_count):
        '''
        target: test collection rows_count is correct or not with multiple collections of JACCARD
        method: create 20 binary collections, add the same entities batch to each,
            assert the value returned by count_entities method is equal to length of entities
        expected: the count is equal to the length of entities
        '''
        raw_vectors, entities = gen_binary_entities(insert_count)
        # NOTE(review): this insert into the `binary_collection` fixture is
        # never flushed or counted — it looks like leftover code; only the
        # collections created in the loop below are verified.
        res = connect.bulk_insert(binary_collection, entities)
        collection_list = []
        collection_num = 20
        for i in range(collection_num):
            collection_name = gen_unique_str(uid)
            collection_list.append(collection_name)
            connect.create_collection(collection_name, default_binary_fields)
            res = connect.bulk_insert(collection_name, entities)
        connect.flush(collection_list)
        for i in range(collection_num):
            res = connect.count_entities(collection_list[i])
            assert res == insert_count
    @pytest.mark.level(2)
    def test_collection_count_multi_collections_mix(self, connect):
        '''
        target: test collection rows_count is correct or not with a mix of
            float and binary collections
        method: create 10 float and 10 binary collections, add the module
            default entities batch to each,
            assert the value returned by count_entities method is equal to length of entities
        expected: the count is equal to default_nb for every collection
        '''
        collection_list = []
        collection_num = 20
        # first half: float collections with the default schema
        for i in range(0, int(collection_num / 2)):
            collection_name = gen_unique_str(uid)
            collection_list.append(collection_name)
            connect.create_collection(collection_name, default_fields)
            res = connect.bulk_insert(collection_name, default_entities)
        # second half: binary collections with the binary schema
        for i in range(int(collection_num / 2), collection_num):
            collection_name = gen_unique_str(uid)
            collection_list.append(collection_name)
            connect.create_collection(collection_name, default_binary_fields)
            res = connect.bulk_insert(collection_name, default_binary_entities)
        connect.flush(collection_list)
        for i in range(collection_num):
            res = connect.count_entities(collection_list[i])
            assert res == default_nb
| 41.821764
| 118
| 0.672065
| 2,656
| 22,291
| 5.423193
| 0.059111
| 0.044293
| 0.021244
| 0.032074
| 0.951888
| 0.942377
| 0.938698
| 0.933352
| 0.924674
| 0.924396
| 0
| 0.004281
| 0.255978
| 22,291
| 532
| 119
| 41.900376
| 0.864215
| 0.257862
| 0
| 0.800578
| 0
| 0
| 0.043151
| 0
| 0
| 0
| 0
| 0
| 0.060694
| 1
| 0.104046
| false
| 0
| 0.031792
| 0
| 0.176301
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b7acd082790c47cd6b34899e80441f7f31c8aa2b
| 9,233
|
py
|
Python
|
waveletcodec/entropy.py
|
zenathark/jg.waveletcodec
|
7994dd18ef5472e7e4d6447062cf4dc3c2f6463f
|
[
"MIT"
] | 1
|
2017-05-14T01:42:18.000Z
|
2017-05-14T01:42:18.000Z
|
waveletcodec/entropy.py
|
zenathark/jg.waveletcodec
|
7994dd18ef5472e7e4d6447062cf4dc3c2f6463f
|
[
"MIT"
] | null | null | null |
waveletcodec/entropy.py
|
zenathark/jg.waveletcodec
|
7994dd18ef5472e7e4d6447062cf4dc3c2f6463f
|
[
"MIT"
] | null | null | null |
"""Module for Entropy Coding Algorithms.
.. module::entropy
:platform: Unix, Windows
.. moduleauthor:: Juan C Galan-Hernandez <jcgalanh@gmail.com>
"""
from __future__ import division
import itertools as it
class arithmeticb(object):
    """Binary arithmetic codec assuming infinite floating-point precision.

    The model is a static order-0 table built from the input bit stream:
    entry ``i`` holds the half-open interval ``[l_i, h_i)`` assigned to
    symbol ``i`` (0 or 1).
    """
    # Class-level defaults; _initialize()/_dinitialize() rebind them per
    # call, so instances do not actually share the mutable ones.
    _ctr = 0
    _l = 0
    _h = 1
    _buff = 0
    _output = []
    _p = []
    def __init__(self):
        super(arithmeticb, self).__init__()
    def _initialize(self, data):
        """Reset coder state and build the static model for *data*."""
        self._ctr = 0
        self._l = 0
        # 0.9999 instead of 1.0 keeps the top of the range strictly < 1.
        self._h = 0.9999
        self._buff = 0
        self._output = []
        # Relative frequency of symbol 0 in the input.
        x = 1 - sum(data) / len(data)
        self._p = [(0, x), (x, 0.9999)]
    def encode(self, data):
        """Encode a list of bits.

        Returns a dict with the final low bound as ``"payload"`` and the
        probability table as ``"model"``.
        """
        self._initialize(data)
        for i in data:
            l_i, h_i = self._p[i]
            d = self._h - self._l
            self._h = self._l + d * h_i
            self._l = self._l + d * l_i
            # BUG FIX: was ``print("...") % (...)`` which raises TypeError
            # on Python 3 (print returns None); apply % inside the call.
            print("l:%f h:%f" % (self._l, self._h))
        r = {"payload": self._l, "model": self._p}
        return r
    def _dinitialize(self):
        """Reset the interval bounds before decoding."""
        self._l = 0
        self._h = 0.9999
    def decode(self, data):
        """Decode a payload produced by :meth:`encode`.

        NOTE(review): there is no explicit message-length terminator; the
        loop only stops once the renormalized value reaches exactly 0 —
        confirm callers only feed payloads that terminate.
        """
        self._dinitialize()
        # BUG FIX: the model must feed the probability table, not the output
        # buffer (the original assigned it to self._output, which corrupted
        # the result and left self._p stale from any previous encode).
        self._p = data["model"]
        self._output = []
        n = data["payload"]
        while n > 0:
            for i, (l_i, h_i) in zip(range(len(self._p)), self._p):
                if l_i <= n and n < h_i:
                    self._output.append(i)
                    d = h_i - l_i
                    n = (n - l_i) / d
                    break
        return self._output
class barithmeticb(object):
    """Arithmetic codec over an arbitrary alphabet using integer registers.

    Builds a static frequency model from the input, then narrows an
    integer interval [_l, _h] of ``bit_size`` bits symbol by symbol,
    emitting settled bits as it goes.
    """
    # Class-level defaults; _initialize() rebinds the mutable ones per run.
    _underflow_bits = 0
    _l = 0
    _h = 1
    _buff = 0
    _output = []
    _bit_size = 0
    _scale = 0
    _sigma = []
    _idx = {}
    _frequency = []
    _accum_freq = []
    def __init__(self, sigma, bit_size=16, **kargs):
        # sigma: iterable of alphabet symbols; bit_size: register width.
        super(barithmeticb, self).__init__()
        self._bit_size = bit_size
        self._sigma = sigma
        # Map each symbol to its position inside sigma.
        self._idx = dict([i for i in zip(sigma, range(len(sigma)))])
        self._scale = 2 ** self._bit_size - 1
        if 'model' in kargs:
            self._model = kargs['model']
        else:
            self._model = None
    def _initialize(self, data):
        """Reset registers and build the static model for *data*."""
        self._l = 0
        self._h = self._scale
        self._buff = 0
        self._output = []
        #calculate frequency of 0
        self._calculate_static_frequency(data)
        self._calculate_accum_freq(data)
    def encode(self, data):
        """Encode the given list using arithmetic encoding.

        Returns {"payload": list of bits, "model": self._model}.
        """
        self._initialize(data)
        for i in data:
            # accum_freq[-1] is pinned to 0 so index 0's lower bound works.
            l_i = self._accum_freq[self._idx[i] - 1]
            h_i = self._accum_freq[self._idx[i]]
            d = self._h - self._l
            self._h = int(self._l + d * (h_i / self._scale))
            self._l = int(self._l + d * (l_i / self._scale))
            while self._check_overflow():
                pass
            while self._check_underflow():
                pass
            # BUG FIX: was ``print("...") % (...)`` which raises TypeError
            # on Python 3 (print returns None); apply % inside the call.
            print("l:%d h:%d" % (self._l, self._h))
        # Flush the remaining low register as individual bits.
        self._output += [int(i) for i in bin(self._l)[2:]]
        r = {"payload": self._output, "model": self._model}
        return r
    def _calculate_static_frequency(self, data):
        """Count occurrences of every alphabet symbol in *data*."""
        self._frequency = [0] * (len(self._sigma))
        for i in self._sigma:
            self._frequency[self._idx[i]] = data.count(i)
    def _calculate_accum_freq(self, data):
        """Build cumulative frequencies; also sets _scale to the total."""
        self._accum_freq = [0] * (len(self._sigma) + 1)
        # Index -1 doubles as the lower bound of the first symbol.
        self._accum_freq[-1] = 0
        accum = 0
        for i in self._sigma:
            self._accum_freq[self._idx[i]] = (accum +
                                              self._frequency[self._idx[i]])
            accum += self._frequency[self._idx[i]]
        self._scale = accum
    def _check_overflow(self):
        # When low and high agree on the MSB that bit is settled: emit it
        # (plus any pending underflow bits, inverted) and shift.
        MSB = 1 << (self._bit_size - 1)
        if self._h & MSB == self._l & MSB:
            for _ in range(self._underflow_bits):
                self._output.append(int(not(self._h & MSB > 0)))
            self._output.append(int((self._h & MSB) > 0))
            self._underflow_bits = 0
            self._shift()
            return True
        return False
    def _check_underflow(self):
        # Near-convergence straddling the midpoint: drop the second MSB and
        # remember it as a pending underflow bit.
        MSB = 1 << (self._bit_size - 2)
        if self._h & MSB == 0 and self._l & MSB > 1:
            self._underflow_bits += 1
            low_mask = ((1 << self._bit_size - 1) |
                        (1 << self._bit_size - 2))
            low_mask = ~low_mask & 2 ** self._bit_size - 1
            self._l &= low_mask
            self._shift()
            self._h |= (1 << self._bit_size - 1)
            return True
        return False
    def _shift(self):
        # Shift both registers left one bit inside the bit_size window;
        # high is padded with 1 to keep the interval closed at the top.
        self._l <<= 1
        self._h <<= 1
        self._l &= 2 ** self._bit_size - 1
        self._h &= 2 ** self._bit_size - 1
        self._h |= 1
    def _dinitialize(self):
        self._l = 0
        # NOTE(review): float bound in an otherwise integer codec — looks
        # copied from the floating-point coder; confirm before relying on it.
        self._h = 0.9999
        #calculate frequency of 0
    def decode(self, data):
        # NOTE(review): vestigial code copied from arithmeticb — it reads
        # self._p, which this class never defines, and compares the bit-list
        # payload against 0; it cannot work as written. Left untouched
        # pending clarification of the intended integer decoder.
        self._dinitialize()
        self._output = data["model"]
        n = data["payload"]
        while(n > 0):
            for i, (l_i, h_i) in zip(range(len(self._p)), self._p):
                if l_i <= n and n < h_i:
                    self._output.append(i)
                    d = h_i - l_i
                    n = (n - l_i) / d
                    break
        return self._output
class abac(object):
    """Adaptive binary arithmetic codec implemented using integer
    arithmetic.

    Symbol counts start at 1 and are incremented after every pushed
    symbol, so the model adapts to the stream as it is encoded.
    """
    # Class-level defaults; _initialize() rebinds the mutable ones per run.
    _underflow_bits = 0
    _l = 0
    _h = 1
    _buff = 0
    _output = []
    _bit_size = 0
    _scale = 0
    _sigma = []
    _idx = {}
    _frequency = []
    _accum_freq = []
    def __init__(self, sigma, bit_size=16, **kargs):
        # sigma: iterable of alphabet symbols; bit_size: register width in
        # bits; optional kwarg 'model' is carried along into the output.
        super(abac, self).__init__()
        self._bit_size = bit_size
        self._sigma = sigma
        # Map each symbol to its position inside sigma.
        self._idx = dict([i for i in zip(sigma, range(len(sigma)))])
        self._scale = 2 ** self._bit_size - 1
        if 'model' in kargs:
            self._model = kargs['model']
        else:
            self._model = None
    def _initialize(self):
        # Reset registers and give every symbol an initial count of 1.
        self._l = 0
        self._h = self._scale
        self._buff = 0
        self._output = []
        #calculate frequency of 0
        self._accum_freq = [0] * (len(self._sigma) + 1)
        # Index -1 doubles as the lower bound of the first symbol (always 0).
        self._accum_freq[-1] = 0
        self._frequency = [1] * (len(self._sigma))
    def encode(self, data):
        """ given list using arithmetic encoding."""
        self._initialize()
        for i in data:
            self.push(i)
        return self.get_current_stream()
    def push(self, symbol):
        # Update the adaptive model, then narrow [l, h] onto the symbol's
        # cumulative-frequency slot and renormalize.
        self._frequency[self._idx[symbol]] += 1
        self._calculate_accum_freq()
        l_i = self._accum_freq[self._idx[symbol] - 1]
        h_i = self._accum_freq[self._idx[symbol]]
        d = self._h - self._l
        self._h = int(self._l + d * (h_i / self._scale))
        self._l = int(self._l + d * (l_i / self._scale))
        while self._check_overflow():
            pass
        while self._check_underflow():
            pass
    def get_current_stream(self):
        # Flush: emitted bits so far plus the binary expansion of low.
        output = self._output + [int(i) for i in bin(self._l)[2:]]
        r = {"payload": output, "model": self._model}
        return r
    def _calculate_accum_freq(self):
        # Rebuild cumulative frequencies; also sets _scale to the total.
        accum = 0
        for i in self._sigma:
            self._accum_freq[self._idx[i]] = (accum +
                                              self._frequency[self._idx[i]])
            accum += self._frequency[self._idx[i]]
        self._scale = accum
    def _check_overflow(self):
        # When low and high agree on the MSB that bit is settled: emit it
        # (plus any pending underflow bits, inverted) and shift.
        MSB = 1 << (self._bit_size - 1)
        if self._h & MSB == self._l & MSB:
            for _ in range(self._underflow_bits):
                self._output.append(int(not(self._h & MSB > 0)))
            self._output.append(int((self._h & MSB) > 0))
            self._underflow_bits = 0
            self._shift()
            return True
        return False
    def _check_underflow(self):
        # Near-convergence straddling the midpoint: drop the second MSB and
        # remember it as a pending underflow bit.
        MSB = 1 << (self._bit_size - 2)
        if self._h & MSB == 0 and self._l & MSB > 1:
            self._underflow_bits += 1
            low_mask = ((1 << self._bit_size - 1) |
                        (1 << self._bit_size - 2))
            low_mask = ~low_mask & 2 ** self._bit_size - 1
            self._l &= low_mask
            self._shift()
            self._h |= (1 << self._bit_size - 1)
            return True
        return False
    def _shift(self):
        # Shift both registers left one bit inside the bit_size window;
        # high is padded with 1 to keep the interval closed at the top.
        self._l <<= 1
        self._h <<= 1
        self._l &= 2 ** self._bit_size - 1
        self._h &= 2 ** self._bit_size - 1
        self._h |= 1
    def _dinitialize(self):
        self._l = 0
        # NOTE(review): float bound in an otherwise integer codec — looks
        # copied from the floating-point coder; confirm before relying on it.
        self._h = 0.9999
        #calculate frequency of 0
    def decode(self, data):
        # NOTE(review): this decoder references self._p, which this class
        # never defines, and treats the bit-list payload as a number; it
        # appears to be vestigial code copied from arithmeticb.
        self._dinitialize()
        self._output = data["model"]
        n = data["payload"]
        while(n > 0):
            for i, (l_i, h_i) in zip(range(len(self._p)), self._p):
                if l_i <= n and n < h_i:
                    self._output.append(i)
                    d = h_i - l_i
                    n = (n - l_i) / d
                    break
        return self._output
    def length(self):
        # Encoded length in bits: emitted prefix plus the final register
        # flush appended by get_current_stream().
        return len(self._output) + self._bit_size
| 29.126183
| 76
| 0.508069
| 1,197
| 9,233
| 3.599833
| 0.099415
| 0.038292
| 0.053609
| 0.038988
| 0.83198
| 0.808308
| 0.801114
| 0.784637
| 0.737294
| 0.717336
| 0
| 0.023467
| 0.367703
| 9,233
| 316
| 77
| 29.218354
| 0.714628
| 0.065201
| 0
| 0.814516
| 0
| 0
| 0.012849
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.108871
| false
| 0.016129
| 0.008065
| 0.004032
| 0.306452
| 0.008065
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4d141c1cb313187e3ffac71b741aa1ca96ee4953
| 70,129
|
py
|
Python
|
nba_py/league.py
|
foxymen9/nba_export_py
|
b9f4c35f92529cf99d13bf32b78856b8fca89a58
|
[
"BSD-3-Clause"
] | null | null | null |
nba_py/league.py
|
foxymen9/nba_export_py
|
b9f4c35f92529cf99d13bf32b78856b8fca89a58
|
[
"BSD-3-Clause"
] | null | null | null |
nba_py/league.py
|
foxymen9/nba_export_py
|
b9f4c35f92529cf99d13bf32b78856b8fca89a58
|
[
"BSD-3-Clause"
] | null | null | null |
from datetime import datetime
from nba_py import _api_scrape, _get_json, _api_scrape_playtype, _get_json_playtype, _api_scrape_with_headers
from nba_py import constants
from . import helpers
CURRENT_TIME = helpers.totimestamp(datetime.now())
class Leaders:
    """League leaders for one stat category (endpoint 'leagueleaders')."""

    _endpoint = 'leagueleaders'

    def __init__(self,
                 league_id=constants.League.Default,
                 per_mode=constants.PerMode.Default,
                 stat_category=constants.StatCategory.Default,
                 season=constants.CURRENT_SEASON,
                 season_type=constants.SeasonType.Default,
                 scope=constants.Scope.Default,):
        # Assemble the query in insertion order, then issue a single request.
        query = {'LeagueID': league_id,
                 'PerMode': per_mode,
                 'StatCategory': stat_category,
                 'Season': season,
                 'SeasonType': season_type,
                 'Scope': scope}
        self.json = _get_json(endpoint=self._endpoint, params=query)

    def results(self):
        """Return the leaders result set (table 0)."""
        return _api_scrape(self.json, 0)
class LeadersTiles:
    """Leaders tiles for one stat category (endpoint 'leaderstiles')."""

    _endpoint = 'leaderstiles'

    def __init__(self,
                 league_id=constants.League.Default,
                 season=constants.CURRENT_SEASON,
                 season_type=constants.SeasonType.Default,
                 game_scope=constants.GameScope.Default,
                 player_scope=constants.PlayerScope.Default,
                 player_or_team=constants.PlayerOrTeam.Default,
                 stat_category=constants.StatCategory.Default):
        # Assemble the query in insertion order, then issue a single request.
        query = {'LeagueID': league_id,
                 'Stat': stat_category,
                 'Season': season,
                 'SeasonType': season_type,
                 'GameScope': game_scope,
                 'PlayerScope': player_scope,
                 'PlayerOrTeam': player_or_team}
        self.json = _get_json(endpoint=self._endpoint, params=query)

    def current_season_high(self):
        """Highs for the season in progress (table 0)."""
        return _api_scrape(self.json, 0)

    def alltime_season_high(self):
        """All-time season highs (table 1)."""
        return _api_scrape(self.json, 1)

    def last_season_high(self):
        """Highs from the previous season (table 2)."""
        return _api_scrape(self.json, 2)

    def low_season_high(self):
        """Lowest season highs (table 3)."""
        return _api_scrape(self.json, 3)
class Lineups:
    """Per-lineup league dashboard stats (endpoint 'leaguedashlineups').

    The request fires once at construction; the raw response is kept in
    ``self.json`` and ``overall()`` scrapes result table 0. Every keyword
    maps 1:1 onto a query-string parameter.
    """
    _endpoint = 'leaguedashlineups'
    def __init__(self,
                 group_quantity=constants.GroupQuantity.Default,
                 season_type=constants.SeasonType.Default,
                 measure_type=constants.MeasureType.Default,
                 per_mode=constants.PerMode.Default,
                 plus_minus=constants.PlusMinus.Default,
                 pace_adjust=constants.PaceAdjust.Default,
                 rank=constants.Rank.Default,
                 season=constants.CURRENT_SEASON,
                 outcome=constants.Outcome.Default,
                 location=constants.Location.Default,
                 month=constants.Month.Default,
                 season_segment=constants.SeasonSegment.Default,
                 date_from=constants.DateFrom.Default,
                 date_to=constants.DateTo.Default,
                 opponent_team_id=constants.OpponentTeamID.Default,
                 vs_conference=constants.VsConference.Default,
                 vs_division=constants.VsDivision.Default,
                 game_segment=constants.GameSegment.Default,
                 period=constants.Period.Default,
                 last_n_games=constants.LastNGames.Default):
        self.json = _get_json(endpoint=self._endpoint,
                              params={'GroupQuantity': group_quantity,
                                      'SeasonType': season_type,
                                      'MeasureType': measure_type,
                                      'PerMode': per_mode,
                                      'PlusMinus': plus_minus,
                                      'PaceAdjust': pace_adjust,
                                      'Rank': rank,
                                      'Season': season,
                                      'Outcome': outcome,
                                      'Location': location,
                                      'Month': month,
                                      'SeasonSegment': season_segment,
                                      'DateFrom': date_from,
                                      'DateTo': date_to,
                                      'OpponentTeamID': opponent_team_id,
                                      'VsConference': vs_conference,
                                      'VsDivision': vs_division,
                                      'GameSegment': game_segment,
                                      'Period': period,
                                      'LastNGames': last_n_games})
    def overall(self):
        """Return the overall lineup result set (table 0)."""
        return _api_scrape(self.json, 0)
class GameLog:
    """League-wide game log (endpoint 'leaguegamelog')."""

    _endpoint = 'leaguegamelog'

    def __init__(self,
                 league_id=constants.League.Default,
                 season=constants.CURRENT_SEASON,
                 season_type=constants.SeasonType.Default,
                 player_or_team=constants.Player_or_Team.Default,
                 counter=constants.Counter.Default,
                 sorter=constants.Sorter.DATE,
                 direction=constants.Direction.Default,
                 ):
        # Assemble the query in insertion order, then issue a single request.
        query = {'LeagueID': league_id,
                 'Season': season,
                 'SeasonType': season_type,
                 'PlayerOrTeam': player_or_team,
                 'Counter': counter,
                 'Sorter': sorter,
                 'Direction': direction
                 }
        self.json = _get_json(endpoint=self._endpoint, params=query)

    def overall(self):
        """Return the game log result set (table 0)."""
        return _api_scrape(self.json, 0)
class GameLogs:
    """Generic game-logs fetcher where the endpoint itself is a parameter.

    NOTE(review): unlike its siblings this class has no ``_endpoint`` class
    attribute — the default endpoint is blank, so callers presumably must
    pass ``endpoint`` explicitly; confirm against call sites.
    """
    def __init__(self,
                 endpoint=constants._DefaultBlank.Default,
                 date_from=constants.DateFrom.Default,
                 date_to=constants.DateTo.Default,
                 game_segment=constants.GameSegment.Default,
                 last_n_games=constants.LastNGames.Default,
                 league_id=constants.League.Default,
                 location=constants.Location.Default,
                 measure_type=constants.MeasureType.Default,
                 month=constants.Month.Default,
                 opponent_team_id=constants.OpponentTeamID.Default,
                 outcome=constants.Outcome.Default,
                 playoff_round=constants.PlayoffRound.Default,
                 per_mode=constants.PerMode.Totals,
                 period=constants.Period.Default,
                 player_id=constants.TeamID.Default,
                 season_segment=constants.SeasonSegment.Default,
                 season_type=constants.SeasonType.Default,
                 season=constants.CURRENT_SEASON,
                 shot_clock_range=constants.ShotClockRange.Default,
                 team_id=constants.TeamID.Default,
                 vs_conference=constants.VsConference.Default,
                 vs_division=constants.VsDivision.Default
                 ):
        # Single request at construction; keywords map 1:1 onto query params.
        self.json = _get_json(endpoint=endpoint,
                              params={'LeagueID': league_id,
                                      'SeasonType': season_type,
                                      'MeasureType': measure_type,
                                      'PerMode': per_mode,
                                      'Season': season,
                                      'PORound': playoff_round,
                                      'Outcome': outcome,
                                      'Location': location,
                                      'Month': month,
                                      'SeasonSegment': season_segment,
                                      'DateFrom': date_from,
                                      'DateTo': date_to,
                                      'OpponentTeamID': opponent_team_id,
                                      'VsConference': vs_conference,
                                      'VsDivision': vs_division,
                                      'TeamID': team_id,
                                      'PlayerID': player_id,
                                      'GameSegment': game_segment,
                                      'Period': period,
                                      'ShotClockRange': shot_clock_range,
                                      'LastNGames': last_n_games
                                      })
    def overall(self):
        """Return the overall result set (table 0)."""
        return _api_scrape(self.json, 0)
class TeamStats:
    """League-wide team dashboard stats (endpoint 'leaguedashteamstats').

    The request fires once at construction; keywords map 1:1 onto
    query-string parameters and ``overall()`` scrapes table 0.
    """
    _endpoint = 'leaguedashteamstats'
    def __init__(self,
                 conference=constants.Conference.Default,
                 date_from=constants.DateFrom.Default,
                 date_to=constants.DateTo.Default,
                 division=constants.Division.Default,
                 game_scope=constants.Game_Scope.Default,
                 game_segment=constants.GameSegment.Default,
                 last_n_games=constants.LastNGames.Default,
                 league_id=constants.League.Default,
                 location=constants.Location.Default,
                 measure_type=constants.MeasureType.Default,
                 month=constants.Month.Default,
                 opponent_team_id=constants.OpponentTeamID.Default,
                 outcome=constants.Outcome.Default,
                 playoff_round=constants.PlayoffRound.Default,
                 pace_adjust=constants.PaceAdjust.Default,
                 per_mode=constants.PerMode.Default,
                 period=constants.Period.Default,
                 player_experience=constants.PlayerExperience.Default,
                 player_position=constants.PlayerPosition.Default,
                 plus_minus=constants.PlusMinus.Default,
                 rank=constants.Rank.Default,
                 season=constants.CURRENT_SEASON,
                 season_segment=constants.SeasonSegment.Default,
                 season_type=constants.SeasonType.Default,
                 shot_clock_range=constants.ShotClockRange.Default,
                 starter_bench=constants.StarterBench.Default,
                 team_id=constants.TeamID.Default,
                 vs_conference=constants.VsConference.Default,
                 vs_division=constants.VsDivision.Default
                 ):
        self.json = _get_json(endpoint=self._endpoint,
                              params={'LeagueID': league_id,
                                      'SeasonType': season_type,
                                      'MeasureType': measure_type,
                                      'PerMode': per_mode,
                                      'PlusMinus': plus_minus,
                                      'PaceAdjust': pace_adjust,
                                      'Rank': rank,
                                      'Season': season,
                                      'PORound': playoff_round,
                                      'Outcome': outcome,
                                      'Location': location,
                                      'Month': month,
                                      'SeasonSegment': season_segment,
                                      'DateFrom': date_from,
                                      'DateTo': date_to,
                                      'OpponentTeamID': opponent_team_id,
                                      'VsConference': vs_conference,
                                      'VsDivision': vs_division,
                                      'TeamID': team_id,
                                      'Conference': conference,
                                      'Division': division,
                                      'GameSegment': game_segment,
                                      'Period': period,
                                      'ShotClockRange': shot_clock_range,
                                      'LastNGames': last_n_games,
                                      'GameScope': game_scope,
                                      'PlayerExperience': player_experience,
                                      'PlayerPosition': player_position,
                                      'StarterBench': starter_bench,
                                      })
    def overall(self):
        """Return the overall team result set (table 0)."""
        return _api_scrape(self.json, 0)
class PlayerStats:
    """League-wide player dashboard stats (endpoint 'leaguedashplayerstats').

    The request fires once at construction; keywords map 1:1 onto
    query-string parameters and ``overall()`` scrapes table 0.
    """
    _endpoint = 'leaguedashplayerstats'
    def __init__(self,
                 season_type=constants.SeasonType.Default,
                 measure_type=constants.MeasureType.Default,
                 per_mode=constants.PerMode.Default,
                 plus_minus=constants.PlusMinus.Default,
                 pace_adjust=constants.PaceAdjust.Default,
                 rank=constants.Rank.Default,
                 season=constants.CURRENT_SEASON,
                 playoff_round=constants.PlayoffRound.Default,
                 outcome=constants.Outcome.Default,
                 location=constants.Location.Default,
                 month=constants.Month.Default,
                 season_segment=constants.SeasonSegment.Default,
                 date_from=constants.DateFrom.Default,
                 date_to=constants.DateTo.Default,
                 opponent_team_id=constants.OpponentTeamID.Default,
                 vs_conference=constants.VsConference.Default,
                 vs_division=constants.VsDivision.Default,
                 team_id=constants.TeamID.Default,
                 conference=constants.Conference.Default,
                 division=constants.Division.Default,
                 game_segment=constants.GameSegment.Default,
                 period=constants.Period.Default,
                 shot_clock_range=constants.ShotClockRange.Default,
                 last_n_games=constants.LastNGames.Default,
                 game_scope=constants.Game_Scope.Default,
                 player_experience=constants.PlayerExperience.Default,
                 player_position=constants.PlayerPosition.Default,
                 starter_bench=constants.StarterBench.Default,
                 draft_year=constants.DraftYear.Default,
                 draft_pick=constants.DraftPick.Default,
                 college=constants.College.Default,
                 country=constants.Country.Default,
                 height=constants.Height.Default,
                 weight=constants.Weight.Default
                 ):
        self.json = _get_json(endpoint=self._endpoint,
                              params={'SeasonType': season_type,
                                      'MeasureType': measure_type,
                                      'PerMode': per_mode,
                                      'PlusMinus': plus_minus,
                                      'PaceAdjust': pace_adjust,
                                      'Rank': rank,
                                      'Season': season,
                                      'PORound': playoff_round,
                                      'Outcome': outcome,
                                      'Location': location,
                                      'Month': month,
                                      'SeasonSegment': season_segment,
                                      'DateFrom': date_from,
                                      'DateTo': date_to,
                                      'OpponentTeamID': opponent_team_id,
                                      'VsConference': vs_conference,
                                      'VsDivision': vs_division,
                                      'TeamID': team_id,
                                      'Conference': conference,
                                      'Division': division,
                                      'GameSegment': game_segment,
                                      'Period': period,
                                      'ShotClockRange': shot_clock_range,
                                      'LastNGames': last_n_games,
                                      'GameScope': game_scope,
                                      'PlayerExperience': player_experience,
                                      'PlayerPosition': player_position,
                                      'StarterBench': starter_bench,
                                      'DraftYear': draft_year,
                                      'DraftPick': draft_pick,
                                      'College': college,
                                      'Country': country,
                                      'Height': height,
                                      'Weight': weight
                                      })
    def overall(self):
        """Return the overall player result set (table 0)."""
        return _api_scrape(self.json, 0)
class PlayerDetails:
    """Player on/off details (endpoint 'leagueplayerondetails').

    The request fires once at construction; keywords map 1:1 onto
    query-string parameters and ``overall()`` scrapes table 0.
    """
    _endpoint = 'leagueplayerondetails'
    def __init__(self,
                 league_id=constants.League.Default,
                 season_type=constants.SeasonType.Default,
                 measure_type=constants.MeasureType.Default,
                 per_mode=constants.PerMode.Default,
                 plus_minus=constants.PlusMinus.Default,
                 pace_adjust=constants.PaceAdjust.Default,
                 rank=constants.Rank.Default,
                 season=constants.CURRENT_SEASON,
                 outcome=constants.Outcome.Default,
                 location=constants.Location.Default,
                 month=constants.Month.Default,
                 season_segment=constants.SeasonSegment.Default,
                 date_from=constants.DateFrom.Default,
                 date_to=constants.DateTo.Default,
                 opponent_team_id=constants.OpponentTeamID.Default,
                 vs_conference=constants.VsConference.Default,
                 vs_division=constants.VsDivision.Default,
                 team_id=constants.TeamID.Default,
                 game_segment=constants.GameSegment.Default,
                 period=constants.Period.Default,
                 last_n_games=constants.LastNGames.Default,
                 ):
        self.json = _get_json(endpoint=self._endpoint,
                              params={'LeagueID': league_id,
                                      'SeasonType': season_type,
                                      'MeasureType': measure_type,
                                      'PerMode': per_mode,
                                      'PlusMinus': plus_minus,
                                      'PaceAdjust': pace_adjust,
                                      'Rank': rank,
                                      'Season': season,
                                      'Outcome': outcome,
                                      'Location': location,
                                      'Month': month,
                                      'SeasonSegment': season_segment,
                                      'DateFrom': date_from,
                                      'DateTo': date_to,
                                      'OpponentTeamID': opponent_team_id,
                                      'VsConference': vs_conference,
                                      'VsDivision': vs_division,
                                      'TeamID': team_id,
                                      'GameSegment': game_segment,
                                      'Period': period,
                                      'LastNGames': last_n_games,
                                      })
    def overall(self):
        """Return the overall result set (table 0)."""
        return _api_scrape(self.json, 0)
class _PlayerTrackingStats:
    """
    Base for SportVU tracking dashboards (endpoint 'leaguedashptstats');
    subclasses select the measure by overriding ``_pt_measure_type``.

    Args:
        :league_id: ID for the league to look in (Default is 00)
        :season_type: Season type to consider (Regular or Playoffs)
        :player_or_team: Filter by (Player or Team)
        :per_mode: Mode to measure statistics (Totals, PerGame, Per36, etc.)
        :season: Season given to look up
        :playoff_round: Playoff round
        :outcome: Filter out by wins or losses
        :location: Filter out by home or away
        :month: Specify month to filter by
        :season_segment: Filter by pre/post all star break
        :date_from: Filter out games before a specific date
        :date_to: Filter out games after a specific date
        :opponent_team_id: Opponent team ID to look up
        :vs_conference: Filter by conference
        :vs_division: Filter by division
        :team_id: ID of the team to look up
        :conference: Filter by conference
        :division: Filter by division
        :last_n_games: Filter by number of games specified in N
        :game_scope: Filter by GameScope (Yesterday, Last 10)
        :player_experience: Player experience (Rookie, Sophomore, Veteran)
        :player_position: Filter by position (Forward, Center, Guard)
        :pt_measure_type: Tracking measure; defaults to the subclass's
            ``_pt_measure_type`` when omitted
        :starter_bench: Filter by Starters or Bench
        :draft_year: Filter by draft year
        :draft_pick: Filter by draft pick (1st+Round, Lottery+Pick, etc.)
        :college: Filter by college
        :country: Filter by country
        :height: Filter by player's height
        :weight: Filter by player's weight
    Attributes:
        :json: Contains the full json dump to play around with
    """
    _endpoint = 'leaguedashptstats'
    _pt_measure_type = ''
    def __init__(self,
                 league_id=constants.League.Default,
                 season_type=constants.SeasonType.Default,
                 player_or_team=constants.PlayerOrTeam.Default,
                 per_mode=constants.PerMode.Default,
                 season=constants.CURRENT_SEASON,
                 playoff_round=constants.PlayoffRound.Default,
                 outcome=constants.Outcome.Default,
                 location=constants.Location.Default,
                 month=constants.Month.Default,
                 season_segment=constants.SeasonSegment.Default,
                 date_from=constants.DateFrom.Default,
                 date_to=constants.DateTo.Default,
                 opponent_team_id=constants.OpponentTeamID.Default,
                 vs_conference=constants.VsConference.Default,
                 vs_division=constants.VsDivision.Default,
                 team_id=constants.TeamID.Default,
                 conference=constants.Conference.Default,
                 division=constants.Division.Default,
                 last_n_games=constants.LastNGames.Default,
                 game_scope=constants.Game_Scope.Default,
                 player_experience=constants.PlayerExperience.Default,
                 player_position=constants.PlayerPosition.Default,
                 pt_measure_type=None,
                 starter_bench=constants.StarterBench.Default,
                 draft_year=constants.DraftYear.Default,
                 draft_pick=constants.DraftPick.Default,
                 college=constants.College.Default,
                 country=constants.Country.Default,
                 height=constants.Height.Default,
                 weight=constants.Weight.Default
                 ):
        # BUG FIX: the previous default ``pt_measure_type=_pt_measure_type``
        # was evaluated once at class-definition time, so it always captured
        # the base-class '' and subclasses overriding _pt_measure_type
        # (e.g. PlayerSpeedDistanceTracking) never saw their own value. A
        # None sentinel resolved against self picks up the subclass
        # attribute while staying backward-compatible for explicit callers.
        if pt_measure_type is None:
            pt_measure_type = self._pt_measure_type
        self.json = _get_json(endpoint=self._endpoint,
                              params={'LeagueID': league_id,
                                      'PtMeasureType': pt_measure_type,
                                      'SeasonType': season_type,
                                      'PlayerOrTeam': player_or_team,
                                      'PerMode': per_mode,
                                      'Season': season,
                                      'PORound': playoff_round,
                                      'Outcome': outcome,
                                      'Location': location,
                                      'Month': month,
                                      'SeasonSegment': season_segment,
                                      'DateFrom': date_from,
                                      'DateTo': date_to,
                                      'OpponentTeamID': opponent_team_id,
                                      'VsConference': vs_conference,
                                      'VsDivision': vs_division,
                                      'TeamID': team_id,
                                      'Conference': conference,
                                      'Division': division,
                                      'LastNGames': last_n_games,
                                      'GameScope': game_scope,
                                      'PlayerExperience': player_experience,
                                      'PlayerPosition': player_position,
                                      'StarterBench': starter_bench,
                                      'DraftYear': draft_year,
                                      'DraftPick': draft_pick,
                                      'College': college,
                                      'Country': country,
                                      'Height': height,
                                      'Weight': weight
                                      })
    def overall(self):
        """Return the overall result set (table 0)."""
        return _api_scrape(self.json, 0)
class PlayerSpeedDistanceTracking(_PlayerTrackingStats):
    """
    Statistics that measure the distance covered and the average speed of all
    movements (sprinting, jogging, standing, walking, backwards and forwards)
    by a player while on the court.
    """
    # NOTE(review): this override is only effective if the base __init__
    # consults self._pt_measure_type for its default; verify that the
    # SpeedDistance measure actually reaches the request parameters.
    _pt_measure_type = constants.PtMeasureType.SpeedDistance
class TeamClutch:
    """Team clutch-time dashboard stats (endpoint 'leaguedashteamclutch').

    The request fires once at construction; keywords map 1:1 onto
    query-string parameters and ``overall()`` scrapes table 0.
    """
    _endpoint = 'leaguedashteamclutch'
    def __init__(self,
                 league_id=constants.League.Default,
                 season_type=constants.SeasonType.Default,
                 measure_type=constants.MeasureType.Default,
                 per_mode=constants.PerMode.Default,
                 plus_minus=constants.PlusMinus.Default,
                 pace_adjust=constants.PaceAdjust.Default,
                 rank=constants.Rank.Default,
                 season=constants.CURRENT_SEASON,
                 playoff_round=constants.PlayoffRound.Default,
                 outcome=constants.Outcome.Default,
                 location=constants.Location.Default,
                 month=constants.Month.Default,
                 season_segment=constants.SeasonSegment.Default,
                 date_from=constants.DateFrom.Default,
                 date_to=constants.DateTo.Default,
                 opponent_team_id=constants.OpponentTeamID.Default,
                 vs_conference=constants.VsConference.Default,
                 vs_division=constants.VsDivision.Default,
                 team_id=constants.TeamID.Default,
                 conference=constants.Conference.Default,
                 division=constants.Division.Default,
                 game_segment=constants.GameSegment.Default,
                 period=constants.Period.Default,
                 shot_clock_range=constants.ShotClockRange.Default,
                 last_n_games=constants.LastNGames.Default,
                 game_scope=constants.Game_Scope.Default,
                 player_experience=constants.PlayerExperience.Default,
                 player_position=constants.PlayerPosition.Default,
                 starter_bench=constants.StarterBench.Default,
                 ahead_behind=constants.AheadBehind.Default,
                 clutch_time=constants.ClutchTime.Default,
                 point_diff=constants.PointDiff.Default
                 ):
        self.json = _get_json(endpoint=self._endpoint,
                              params={'LeagueID': league_id,
                                      'SeasonType': season_type,
                                      'MeasureType': measure_type,
                                      'PerMode': per_mode,
                                      'PlusMinus': plus_minus,
                                      'PaceAdjust': pace_adjust,
                                      'Rank': rank,
                                      'Season': season,
                                      'PORound': playoff_round,
                                      'Outcome': outcome,
                                      'Location': location,
                                      'Month': month,
                                      'SeasonSegment': season_segment,
                                      'DateFrom': date_from,
                                      'DateTo': date_to,
                                      'OpponentTeamID': opponent_team_id,
                                      'VsConference': vs_conference,
                                      'VsDivision': vs_division,
                                      'TeamID': team_id,
                                      'Conference': conference,
                                      'Division': division,
                                      'GameSegment': game_segment,
                                      'Period': period,
                                      'ShotClockRange': shot_clock_range,
                                      'LastNGames': last_n_games,
                                      'GameScope': game_scope,
                                      'PlayerExperience': player_experience,
                                      'PlayerPosition': player_position,
                                      'StarterBench': starter_bench,
                                      'AheadBehind': ahead_behind,
                                      'ClutchTime': clutch_time,
                                      'PointDiff': point_diff
                                      })
    def overall(self):
        """Return the overall team clutch result set (table 0)."""
        return _api_scrape(self.json, 0)
class PlayerClutch:
    """Player clutch-time dashboard stats (endpoint 'leaguedashplayerclutch').

    The request fires once at construction; keywords map 1:1 onto
    query-string parameters and ``overall()`` scrapes table 0.
    """
    _endpoint = 'leaguedashplayerclutch'
    def __init__(self,
                 league_id=constants.League.Default,
                 season_type=constants.SeasonType.Default,
                 measure_type=constants.MeasureType.Default,
                 per_mode=constants.PerMode.Default,
                 plus_minus=constants.PlusMinus.Default,
                 pace_adjust=constants.PaceAdjust.Default,
                 rank=constants.Rank.Default,
                 season=constants.CURRENT_SEASON,
                 playoff_round=constants.PlayoffRound.Default,
                 outcome=constants.Outcome.Default,
                 location=constants.Location.Default,
                 month=constants.Month.Default,
                 season_segment=constants.SeasonSegment.Default,
                 date_from=constants.DateFrom.Default,
                 date_to=constants.DateTo.Default,
                 opponent_team_id=constants.OpponentTeamID.Default,
                 vs_conference=constants.VsConference.Default,
                 vs_division=constants.VsDivision.Default,
                 team_id=constants.TeamID.Default,
                 conference=constants.Conference.Default,
                 division=constants.Division.Default,
                 game_segment=constants.GameSegment.Default,
                 period=constants.Period.Default,
                 shot_clock_range=constants.ShotClockRange.Default,
                 last_n_games=constants.LastNGames.Default,
                 game_scope=constants.Game_Scope.Default,
                 player_experience=constants.PlayerExperience.Default,
                 player_position=constants.PlayerPosition.Default,
                 starter_bench=constants.StarterBench.Default,
                 ahead_behind=constants.AheadBehind.Default,
                 clutch_time=constants.ClutchTime.Default,
                 point_diff=constants.PointDiff.Default,
                 draft_year=constants.DraftYear.Default,
                 draft_pick=constants.DraftPick.Default,
                 college=constants.College.Default,
                 country=constants.Country.Default,
                 height=constants.Height.Default,
                 weight=constants.Weight.Default
                 ):
        self.json = _get_json(endpoint=self._endpoint,
                              params={'LeagueID': league_id,
                                      'SeasonType': season_type,
                                      'MeasureType': measure_type,
                                      'PerMode': per_mode,
                                      'PlusMinus': plus_minus,
                                      'PaceAdjust': pace_adjust,
                                      'Rank': rank,
                                      'Season': season,
                                      'PORound': playoff_round,
                                      'Outcome': outcome,
                                      'Location': location,
                                      'Month': month,
                                      'SeasonSegment': season_segment,
                                      'DateFrom': date_from,
                                      'DateTo': date_to,
                                      'OpponentTeamID': opponent_team_id,
                                      'VsConference': vs_conference,
                                      'VsDivision': vs_division,
                                      'TeamID': team_id,
                                      'Conference': conference,
                                      'Division': division,
                                      'GameSegment': game_segment,
                                      'Period': period,
                                      'ShotClockRange': shot_clock_range,
                                      'LastNGames': last_n_games,
                                      'GameScope': game_scope,
                                      'PlayerExperience': player_experience,
                                      'PlayerPosition': player_position,
                                      'StarterBench': starter_bench,
                                      'AheadBehind': ahead_behind,
                                      'ClutchTime': clutch_time,
                                      'PointDiff': point_diff,
                                      'DraftYear': draft_year,
                                      'DraftPick': draft_pick,
                                      'College': college,
                                      'Country': country,
                                      'Height': height,
                                      'Weight': weight
                                      })
    def overall(self):
        """Return the overall player clutch result set (table 0)."""
        return _api_scrape(self.json, 0)
class TeamPlaytype:
    """Synergy play-type stats at team level (endpoint 'team')."""

    _endpoint = 'team'

    def __init__(self,
                 category=constants._DefaultBlank.Default,
                 season=constants.CURRENT_SEASON,
                 season_type=constants.PlaytypeSeasonType.Default,
                 time=CURRENT_TIME,
                 names=constants.PlaytypeNames.Default,
                 limit=constants.PlaytypeLimit.All,
                 ):
        # Assemble the play-type query, then issue a single request.
        query = {'category': category,
                 'season': season,
                 'seasonType': season_type,
                 'names': names,
                 'q': time,
                 'limit': limit
                 }
        self.json = _get_json_playtype(endpoint=self._endpoint, params=query)

    def overall(self):
        """Return the scraped play-type table."""
        return _api_scrape_playtype(self.json)
class PlayerPlaytype:
    """Synergy play-type stats at player level (endpoint 'player')."""

    _endpoint = 'player'

    def __init__(self,
                 category=constants._DefaultBlank.Default,
                 season=constants.CURRENT_SEASON,
                 season_type=constants.PlaytypeSeasonType.Default,
                 time=CURRENT_TIME,
                 names=constants.PlaytypeNames.Default,
                 limit=constants.PlaytypeLimit.All,
                 ):
        # Assemble the play-type query, then issue a single request.
        query = {'category': category,
                 'season': season,
                 'seasonType': season_type,
                 'names': names,
                 'q': time,
                 'limit': limit
                 }
        self.json = _get_json_playtype(endpoint=self._endpoint, params=query)

    def overall(self):
        """Return the scraped play-type table."""
        return _api_scrape_playtype(self.json)
class TeamDefenseDashboard:
    """Team defensive tracking dashboard (endpoint 'leaguedashptteamdefend').

    The request fires once at construction; keywords map 1:1 onto
    query-string parameters and ``overall()`` scrapes table 0.
    """
    _endpoint = 'leaguedashptteamdefend'
    def __init__(self,
                 league_id=constants.League.Default,
                 season_type=constants.SeasonType.Default,
                 defense_category=constants._DefaultBlank.Default,
                 per_mode=constants.PerMode.Default,
                 season=constants.CURRENT_SEASON,
                 playoff_round=constants.PlayoffRound.Default,
                 outcome=constants.Outcome.Default,
                 location=constants.Location.Default,
                 month=constants.Month.Default,
                 season_segment=constants.SeasonSegment.Default,
                 date_from=constants.DateFrom.Default,
                 date_to=constants.DateTo.Default,
                 opponent_team_id=constants.OpponentTeamID.Default,
                 vs_conference=constants.VsConference.Default,
                 vs_division=constants.VsDivision.Default,
                 team_id=constants.TeamID.Default,
                 conference=constants.Conference.Default,
                 division=constants.Division.Default,
                 game_segment=constants.GameSegment.Default,
                 period=constants.Period.Default,
                 last_n_games=constants.LastNGames.Default,
                 ):
        self.json = _get_json(endpoint=self._endpoint,
                              params={'LeagueID': league_id,
                                      'SeasonType': season_type,
                                      'DefenseCategory': defense_category,
                                      'PerMode': per_mode,
                                      'Season': season,
                                      'PORound': playoff_round,
                                      'Outcome': outcome,
                                      'Location': location,
                                      'Month': month,
                                      'SeasonSegment': season_segment,
                                      'DateFrom': date_from,
                                      'DateTo': date_to,
                                      'OpponentTeamID': opponent_team_id,
                                      'VsConference': vs_conference,
                                      'VsDivision': vs_division,
                                      'TeamID': team_id,
                                      'Conference': conference,
                                      'Division': division,
                                      'GameSegment': game_segment,
                                      'Period': period,
                                      'LastNGames': last_n_games,
                                      })
    def overall(self):
        """Return the overall team defense result set (table 0)."""
        return _api_scrape(self.json, 0)
class PlayerDefenseDashboard:
    """Player defensive dashboard (leaguedashptdefend endpoint)."""

    _endpoint = 'leaguedashptdefend'

    def __init__(self,
                 league_id=constants.League.Default,
                 season_type=constants.SeasonType.Default,
                 defense_category=constants._DefaultBlank.Default,
                 per_mode=constants.PerMode.Default,
                 season=constants.CURRENT_SEASON,
                 playoff_round=constants.PlayoffRound.Default,
                 outcome=constants.Outcome.Default,
                 location=constants.Location.Default,
                 month=constants.Month.Default,
                 season_segment=constants.SeasonSegment.Default,
                 date_from=constants.DateFrom.Default,
                 date_to=constants.DateTo.Default,
                 opponent_team_id=constants.OpponentTeamID.Default,
                 vs_conference=constants.VsConference.Default,
                 vs_division=constants.VsDivision.Default,
                 team_id=constants.TeamID.Default,
                 # NOTE(review): player_id reuses constants.TeamID.Default —
                 # presumably the same blank/zero default; confirm a dedicated
                 # PlayerID constant isn't intended.
                 player_id=constants.TeamID.Default,
                 conference=constants.Conference.Default,
                 division=constants.Division.Default,
                 game_segment=constants.GameSegment.Default,
                 period=constants.Period.Default,
                 last_n_games=constants.LastNGames.Default,
                 player_experience=constants.PlayerExperience.Default,
                 player_position=constants.PlayerPosition.Default,
                 starter_bench=constants.StarterBench.Default,
                 draft_year=constants.DraftYear.Default,
                 draft_pick=constants.DraftPick.Default,
                 college=constants.College.Default,
                 country=constants.Country.Default,
                 height=constants.Height.Default,
                 weight=constants.Weight.Default
                 ):
        """Fetch the raw JSON for the requested filter combination."""
        query = dict(
            LeagueID=league_id, SeasonType=season_type,
            DefenseCategory=defense_category, PerMode=per_mode,
            Season=season, PORound=playoff_round,
            Outcome=outcome, Location=location, Month=month,
            SeasonSegment=season_segment, DateFrom=date_from, DateTo=date_to,
            OpponentTeamID=opponent_team_id, VsConference=vs_conference,
            VsDivision=vs_division, TeamID=team_id, PlayerID=player_id,
            Conference=conference, Division=division,
            GameSegment=game_segment, Period=period,
            LastNGames=last_n_games,
            PlayerExperience=player_experience,
            PlayerPosition=player_position,
            StarterBench=starter_bench,
            DraftYear=draft_year, DraftPick=draft_pick,
            College=college, Country=country,
            Height=height, Weight=weight)
        self.json = _get_json(endpoint=self._endpoint, params=query)

    def overall(self):
        """Return the first (overall) result set of the response."""
        return _api_scrape(self.json, 0)
class TeamShotDashboard:
    """Team shot-tracking dashboard (leaguedashteamptshot endpoint)."""

    _endpoint = 'leaguedashteamptshot'

    def __init__(self,
                 league_id=constants.League.Default,
                 season_type=constants.SeasonType.Default,
                 per_mode=constants.PerMode.Default,
                 season=constants.CURRENT_SEASON,
                 playoff_round=constants.PlayoffRound.Default,
                 outcome=constants.Outcome.Default,
                 location=constants.Location.Default,
                 month=constants.Month.Default,
                 season_segment=constants.SeasonSegment.Default,
                 date_from=constants.DateFrom.Default,
                 date_to=constants.DateTo.Default,
                 opponent_team_id=constants.OpponentTeamID.Default,
                 vs_conference=constants.VsConference.Default,
                 vs_division=constants.VsDivision.Default,
                 team_id=constants.TeamID.Default,
                 conference=constants.Conference.Default,
                 division=constants.Division.Default,
                 game_segment=constants.GameSegment.Default,
                 period=constants.Period.Default,
                 last_n_games=constants.LastNGames.Default,
                 general_range=constants._DefaultBlank.Default,
                 shotclock_range=constants._DefaultBlank.Default,
                 dribbles_range=constants._DefaultBlank.Default,
                 touchtime_range=constants._DefaultBlank.Default,
                 closedefdist_range=constants._DefaultBlank.Default,
                 shotdist_range=constants._DefaultBlank.Default,
                 ):
        """Fetch the raw JSON for the requested filter combination."""
        query = dict(
            LeagueID=league_id, SeasonType=season_type,
            PerMode=per_mode, Season=season, PORound=playoff_round,
            Outcome=outcome, Location=location, Month=month,
            SeasonSegment=season_segment, DateFrom=date_from, DateTo=date_to,
            OpponentTeamID=opponent_team_id, VsConference=vs_conference,
            VsDivision=vs_division, TeamID=team_id,
            Conference=conference, Division=division,
            GameSegment=game_segment, Period=period,
            LastNGames=last_n_games,
            GeneralRange=general_range, ShotClockRange=shotclock_range,
            DribbleRange=dribbles_range, TouchTimeRange=touchtime_range,
            CloseDefDistRange=closedefdist_range,
            ShotDistRange=shotdist_range)
        self.json = _get_json(endpoint=self._endpoint, params=query)

    def overall(self):
        """Return the first (overall) result set of the response."""
        return _api_scrape(self.json, 0)
class PlayerShotDashboard:
    """Player shot-tracking dashboard (leaguedashplayerptshot endpoint)."""

    _endpoint = 'leaguedashplayerptshot'

    def __init__(self,
                 league_id=constants.League.Default,
                 season_type=constants.SeasonType.Default,
                 per_mode=constants.PerMode.Default,
                 season=constants.CURRENT_SEASON,
                 playoff_round=constants.PlayoffRound.Default,
                 outcome=constants.Outcome.Default,
                 location=constants.Location.Default,
                 month=constants.Month.Default,
                 season_segment=constants.SeasonSegment.Default,
                 date_from=constants.DateFrom.Default,
                 date_to=constants.DateTo.Default,
                 opponent_team_id=constants.OpponentTeamID.Default,
                 vs_conference=constants.VsConference.Default,
                 vs_division=constants.VsDivision.Default,
                 team_id=constants.TeamID.Default,
                 conference=constants.Conference.Default,
                 division=constants.Division.Default,
                 game_segment=constants.GameSegment.Default,
                 period=constants.Period.Default,
                 last_n_games=constants.LastNGames.Default,
                 general_range=constants._DefaultBlank.Default,
                 shotclock_range=constants._DefaultBlank.Default,
                 dribbles_range=constants._DefaultBlank.Default,
                 touchtime_range=constants._DefaultBlank.Default,
                 closedefdist_range=constants._DefaultBlank.Default,
                 shotdist_range=constants._DefaultBlank.Default,
                 player_experience=constants.PlayerExperience.Default,
                 player_position=constants.PlayerPosition.Default,
                 starter_bench=constants.StarterBench.Default,
                 draft_year=constants.DraftYear.Default,
                 draft_pick=constants.DraftPick.Default,
                 college=constants.College.Default,
                 country=constants.Country.Default,
                 height=constants.Height.Default,
                 weight=constants.Weight.Default
                 ):
        """Fetch the raw JSON for the requested filter combination."""
        query = dict(
            LeagueID=league_id, SeasonType=season_type,
            PerMode=per_mode, Season=season, PORound=playoff_round,
            Outcome=outcome, Location=location, Month=month,
            SeasonSegment=season_segment, DateFrom=date_from, DateTo=date_to,
            OpponentTeamID=opponent_team_id, VsConference=vs_conference,
            VsDivision=vs_division, TeamID=team_id,
            Conference=conference, Division=division,
            GameSegment=game_segment, Period=period,
            LastNGames=last_n_games,
            GeneralRange=general_range, ShotClockRange=shotclock_range,
            DribbleRange=dribbles_range, TouchTimeRange=touchtime_range,
            CloseDefDistRange=closedefdist_range,
            ShotDistRange=shotdist_range,
            PlayerExperience=player_experience,
            PlayerPosition=player_position,
            StarterBench=starter_bench,
            DraftYear=draft_year, DraftPick=draft_pick,
            College=college, Country=country,
            Height=height, Weight=weight)
        self.json = _get_json(endpoint=self._endpoint, params=query)

    def overall(self):
        """Return the first (overall) result set of the response."""
        return _api_scrape(self.json, 0)
class TeamShooting:
    """Team shooting by shot location (leaguedashteamshotlocations endpoint)."""

    _endpoint = 'leaguedashteamshotlocations'

    def __init__(self,
                 conference=constants.Conference.Default,
                 date_from=constants.DateFrom.Default,
                 date_to=constants.DateTo.Default,
                 distance_range=constants.DistanceRange.Default,
                 division=constants.Division.Default,
                 game_scope=constants.Game_Scope.Default,
                 game_segment=constants.GameSegment.Default,
                 last_n_games=constants.LastNGames.Default,
                 league_id=constants.League.Default,
                 location=constants.Location.Default,
                 measure_type=constants.MeasureType.Default,
                 month=constants.Month.Default,
                 opponent_team_id=constants.OpponentTeamID.Default,
                 outcome=constants.Outcome.Default,
                 playoff_round=constants.PlayoffRound.Default,
                 pace_adjust=constants.PaceAdjust.Default,
                 per_mode=constants.PerMode.Default,
                 period=constants.Period.Default,
                 player_experience=constants.PlayerExperience.Default,
                 player_position=constants.PlayerPosition.Default,
                 plus_minus=constants.PlusMinus.Default,
                 rank=constants.Rank.Default,
                 season=constants.CURRENT_SEASON,
                 season_segment=constants.SeasonSegment.Default,
                 season_type=constants.SeasonType.Default,
                 shot_clock_range=constants.ShotClockRange.Default,
                 starter_bench=constants.StarterBench.Default,
                 team_id=constants.TeamID.Default,
                 vs_conference=constants.VsConference.Default,
                 vs_division=constants.VsDivision.Default
                 ):
        """Fetch the raw JSON for the requested filter combination."""
        query = dict(
            LeagueID=league_id, SeasonType=season_type,
            MeasureType=measure_type, PerMode=per_mode,
            PlusMinus=plus_minus, PaceAdjust=pace_adjust, Rank=rank,
            Season=season, PORound=playoff_round,
            Outcome=outcome, Location=location, Month=month,
            SeasonSegment=season_segment, DateFrom=date_from, DateTo=date_to,
            OpponentTeamID=opponent_team_id, VsConference=vs_conference,
            VsDivision=vs_division, TeamID=team_id,
            Conference=conference, Division=division,
            GameSegment=game_segment, Period=period,
            ShotClockRange=shot_clock_range,
            LastNGames=last_n_games, GameScope=game_scope,
            PlayerExperience=player_experience,
            PlayerPosition=player_position,
            StarterBench=starter_bench,
            DistanceRange=distance_range)
        self.json = _get_json(endpoint=self._endpoint, params=query)

    def overall(self):
        """Return the parsed result set (this endpoint has nested headers)."""
        return _api_scrape_with_headers(self.json)
class PlayerShooting:
    """Player shooting by shot location (leaguedashplayershotlocations endpoint)."""

    _endpoint = 'leaguedashplayershotlocations'

    def __init__(self,
                 conference=constants.Conference.Default,
                 date_from=constants.DateFrom.Default,
                 date_to=constants.DateTo.Default,
                 distance_range=constants.DistanceRange.Default,
                 division=constants.Division.Default,
                 game_scope=constants.Game_Scope.Default,
                 game_segment=constants.GameSegment.Default,
                 last_n_games=constants.LastNGames.Default,
                 league_id=constants.League.Default,
                 location=constants.Location.Default,
                 measure_type=constants.MeasureType.Default,
                 month=constants.Month.Default,
                 opponent_team_id=constants.OpponentTeamID.Default,
                 outcome=constants.Outcome.Default,
                 playoff_round=constants.PlayoffRound.Default,
                 pace_adjust=constants.PaceAdjust.Default,
                 per_mode=constants.PerMode.Default,
                 period=constants.Period.Default,
                 player_experience=constants.PlayerExperience.Default,
                 player_position=constants.PlayerPosition.Default,
                 plus_minus=constants.PlusMinus.Default,
                 rank=constants.Rank.Default,
                 season=constants.CURRENT_SEASON,
                 season_segment=constants.SeasonSegment.Default,
                 season_type=constants.SeasonType.Default,
                 shot_clock_range=constants.ShotClockRange.Default,
                 starter_bench=constants.StarterBench.Default,
                 team_id=constants.TeamID.Default,
                 vs_conference=constants.VsConference.Default,
                 vs_division=constants.VsDivision.Default,
                 draft_year=constants.DraftYear.Default,
                 draft_pick=constants.DraftPick.Default,
                 college=constants.College.Default,
                 country=constants.Country.Default,
                 height=constants.Height.Default,
                 weight=constants.Weight.Default
                 ):
        """Fetch the raw JSON for the requested filter combination."""
        query = dict(
            LeagueID=league_id, SeasonType=season_type,
            MeasureType=measure_type, PerMode=per_mode,
            PlusMinus=plus_minus, PaceAdjust=pace_adjust, Rank=rank,
            Season=season, PORound=playoff_round,
            Outcome=outcome, Location=location, Month=month,
            SeasonSegment=season_segment, DateFrom=date_from, DateTo=date_to,
            OpponentTeamID=opponent_team_id, VsConference=vs_conference,
            VsDivision=vs_division, TeamID=team_id,
            Conference=conference, Division=division,
            GameSegment=game_segment, Period=period,
            ShotClockRange=shot_clock_range,
            LastNGames=last_n_games, GameScope=game_scope,
            PlayerExperience=player_experience,
            PlayerPosition=player_position,
            StarterBench=starter_bench,
            DistanceRange=distance_range,
            DraftYear=draft_year, DraftPick=draft_pick,
            College=college, Country=country,
            Height=height, Weight=weight)
        self.json = _get_json(endpoint=self._endpoint, params=query)

    def overall(self):
        """Return the parsed result set (this endpoint has nested headers)."""
        return _api_scrape_with_headers(self.json)
class TeamOpponentShooting:
    """Opponent shot-tracking stats against a team (leaguedashoppptshot endpoint)."""

    _endpoint = 'leaguedashoppptshot'

    def __init__(self,
                 league_id=constants.League.Default,
                 season_type=constants.SeasonType.Default,
                 per_mode=constants.PerMode.Default,
                 season=constants.CURRENT_SEASON,
                 playoff_round=constants.PlayoffRound.Default,
                 outcome=constants.Outcome.Default,
                 location=constants.Location.Default,
                 month=constants.Month.Default,
                 season_segment=constants.SeasonSegment.Default,
                 date_from=constants.DateFrom.Default,
                 date_to=constants.DateTo.Default,
                 opponent_team_id=constants.OpponentTeamID.Default,
                 vs_conference=constants.VsConference.Default,
                 vs_division=constants.VsDivision.Default,
                 team_id=constants.TeamID.Default,
                 conference=constants.Conference.Default,
                 division=constants.Division.Default,
                 game_segment=constants.GameSegment.Default,
                 period=constants.Period.Default,
                 last_n_games=constants.LastNGames.Default,
                 general_range=constants._DefaultBlank.Default,
                 shotclock_range=constants._DefaultBlank.Default,
                 dribbles_range=constants._DefaultBlank.Default,
                 touchtime_range=constants._DefaultBlank.Default,
                 closedefdist_range=constants._DefaultBlank.Default,
                 shotdist_range=constants._DefaultBlank.Default,
                 ):
        """Fetch the raw JSON for the requested filter combination."""
        query = dict(
            LeagueID=league_id, SeasonType=season_type,
            PerMode=per_mode, Season=season, PORound=playoff_round,
            Outcome=outcome, Location=location, Month=month,
            SeasonSegment=season_segment, DateFrom=date_from, DateTo=date_to,
            OpponentTeamID=opponent_team_id, VsConference=vs_conference,
            VsDivision=vs_division, TeamID=team_id,
            Conference=conference, Division=division,
            GameSegment=game_segment, Period=period,
            LastNGames=last_n_games,
            GeneralRange=general_range, ShotClockRange=shotclock_range,
            DribbleRange=dribbles_range, TouchTimeRange=touchtime_range,
            CloseDefDistRange=closedefdist_range,
            ShotDistRange=shotdist_range)
        self.json = _get_json(endpoint=self._endpoint, params=query)

    def overall(self):
        """Return the first (overall) result set of the response."""
        return _api_scrape(self.json, 0)
class TeamHustle:
    """Team hustle statistics (leaguehustlestatsteam endpoint)."""

    _endpoint = 'leaguehustlestatsteam'

    def __init__(self,
                 league_id=constants.League.Default,
                 season_type=constants.SeasonType.Default,
                 per_mode=constants.PerMode.Default,
                 season=constants.CURRENT_SEASON,
                 playoff_round=constants.PlayoffRound.Default,
                 outcome=constants.Outcome.Default,
                 location=constants.Location.Default,
                 month=constants.Month.Default,
                 season_segment=constants.SeasonSegment.Default,
                 date_from=constants.DateFrom.Default,
                 date_to=constants.DateTo.Default,
                 opponent_team_id=constants.OpponentTeamID.Default,
                 vs_conference=constants.VsConference.Default,
                 vs_division=constants.VsDivision.Default,
                 team_id=constants.TeamID.Default,
                 conference=constants.Conference.Default,
                 division=constants.Division.Default,
                 last_n_games=constants.LastNGames.Default,
                 player_experience=constants.PlayerExperience.Default,
                 player_position=constants.PlayerPosition.Default,
                 draft_year=constants.DraftYear.Default,
                 draft_pick=constants.DraftPick.Default,
                 college=constants.College.Default,
                 country=constants.Country.Default,
                 height=constants.Height.Default,
                 weight=constants.Weight.Default
                 ):
        """Fetch the raw JSON for the requested filter combination."""
        query = dict(
            LeagueID=league_id, SeasonType=season_type,
            PerMode=per_mode, Season=season, PORound=playoff_round,
            Outcome=outcome, Location=location, Month=month,
            SeasonSegment=season_segment, DateFrom=date_from, DateTo=date_to,
            OpponentTeamID=opponent_team_id, VsConference=vs_conference,
            VsDivision=vs_division, TeamID=team_id,
            Conference=conference, Division=division,
            LastNGames=last_n_games,
            PlayerExperience=player_experience,
            PlayerPosition=player_position,
            DraftYear=draft_year, DraftPick=draft_pick,
            College=college, Country=country,
            Height=height, Weight=weight)
        self.json = _get_json(endpoint=self._endpoint, params=query)

    def overall(self):
        """Return the first (overall) result set of the response."""
        return _api_scrape(self.json, 0)
class PlayerHustle:
    """Player hustle statistics (leaguehustlestatsplayer endpoint)."""

    _endpoint = 'leaguehustlestatsplayer'

    def __init__(self,
                 league_id=constants.League.Default,
                 season_type=constants.SeasonType.Default,
                 per_mode=constants.PerMode.Default,
                 season=constants.CURRENT_SEASON,
                 playoff_round=constants.PlayoffRound.Default,
                 outcome=constants.Outcome.Default,
                 location=constants.Location.Default,
                 month=constants.Month.Default,
                 season_segment=constants.SeasonSegment.Default,
                 date_from=constants.DateFrom.Default,
                 date_to=constants.DateTo.Default,
                 opponent_team_id=constants.OpponentTeamID.Default,
                 vs_conference=constants.VsConference.Default,
                 vs_division=constants.VsDivision.Default,
                 team_id=constants.TeamID.Default,
                 conference=constants.Conference.Default,
                 division=constants.Division.Default,
                 last_n_games=constants.LastNGames.Default,
                 player_experience=constants.PlayerExperience.Default,
                 player_position=constants.PlayerPosition.Default,
                 draft_year=constants.DraftYear.Default,
                 draft_pick=constants.DraftPick.Default,
                 college=constants.College.Default,
                 country=constants.Country.Default,
                 height=constants.Height.Default,
                 weight=constants.Weight.Default
                 ):
        """Fetch the raw JSON for the requested filter combination."""
        query = dict(
            LeagueID=league_id, SeasonType=season_type,
            PerMode=per_mode, Season=season, PORound=playoff_round,
            Outcome=outcome, Location=location, Month=month,
            SeasonSegment=season_segment, DateFrom=date_from, DateTo=date_to,
            OpponentTeamID=opponent_team_id, VsConference=vs_conference,
            VsDivision=vs_division, TeamID=team_id,
            Conference=conference, Division=division,
            LastNGames=last_n_games,
            PlayerExperience=player_experience,
            PlayerPosition=player_position,
            DraftYear=draft_year, DraftPick=draft_pick,
            College=college, Country=country,
            Height=height, Weight=weight)
        self.json = _get_json(endpoint=self._endpoint, params=query)

    def overall(self):
        """Return the first (overall) result set of the response."""
        return _api_scrape(self.json, 0)
| 52.101783
| 109
| 0.468979
| 4,740
| 70,129
| 6.700211
| 0.055274
| 0.013036
| 0.01587
| 0.014956
| 0.908593
| 0.905948
| 0.900406
| 0.892975
| 0.885985
| 0.884663
| 0
| 0.000744
| 0.463204
| 70,129
| 1,345
| 110
| 52.14052
| 0.842901
| 0.023614
| 0
| 0.924917
| 0
| 0
| 0.074001
| 0.003045
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038779
| false
| 0
| 0.0033
| 0.020627
| 0.10066
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4d2622b4f5dfce655d16a0bedb090bcde55c6a52
| 4,455
|
py
|
Python
|
algorithms_and_evaluation/test.py
|
Ponyooo/book-recommender
|
278fc0f7b3daf13e9073962bac4916e11af427f7
|
[
"MIT"
] | null | null | null |
algorithms_and_evaluation/test.py
|
Ponyooo/book-recommender
|
278fc0f7b3daf13e9073962bac4916e11af427f7
|
[
"MIT"
] | null | null | null |
algorithms_and_evaluation/test.py
|
Ponyooo/book-recommender
|
278fc0f7b3daf13e9073962bac4916e11af427f7
|
[
"MIT"
] | null | null | null |
#-*-coding:utf-8-*-
"""
@author: hjx
评测算法性能
"""
from algorithms import usercf
from algorithms import itemcf
from algorithms import useriif
from algorithms import itemiuf
from util import user_book_reader
from util import evaluation
import time
# usercf: split the data into 8 folds and run 8 experiments with different
# fold indices to avoid overfitting a single split.
recall = 0
precision = 0
coverage = 0
popularity = 0
for k in range(8):
    # Build train/test splits of (uid, iid) pairs for this fold.
    trainset, testset = user_book_reader.read_rating_data(k=k)
    print("训练集数量:" + str(len(trainset)))
    print("测试集数量:" + str(len(testset)))
    # Train the user-based CF model on this fold's training data.
    model = usercf.UserCF(trainset)
    model.train()
    # Group the held-out items by user: {uid: [iid, ...]}.
    test = dict()
    for user, item in testset:
        test.setdefault(user, []).append(item)
    recommends = model.recommends(test.keys(), 20, 30)
    # Accumulate the four evaluation metrics across folds.
    recall += evaluation.recall(recommends, test)
    precision += evaluation.precision(recommends, test)
    coverage += evaluation.coverage(recommends, trainset)
    popularity += evaluation.popularity(recommends, trainset)
# Average the metrics over the 8 experiments.
average_recall = recall / 8.0
average_precision = precision / 8.0
average_coverage = coverage / 8.0
average_popularity = popularity / 8.0
print("usercf性能:")
print(average_recall)
print(average_precision)
print(average_coverage)
print(average_popularity)
# useriif: split the data into 8 folds and run 8 experiments with different
# fold indices to avoid overfitting a single split.
recall = 0
precision = 0
coverage = 0
popularity = 0
for k in range(0, 8):
    # Build train/test splits of (uid, iid) pairs for this fold.
    # BUGFIX: was read_rating_data(k=0), which reused fold 0 for every
    # iteration; pass the loop index so each experiment uses its own fold,
    # matching the usercf section above.
    trainset, testset = user_book_reader.read_rating_data(k=k)
    print("训练集数量:" + str(len(trainset)))
    print("测试集数量:" + str(len(testset)))
    # Train the UserIIF model on this fold's training data.
    uiif = useriif.UserIIF(trainset)
    uiif.train()
    # Group the held-out items by user: {uid: [iid, ...]}.
    test = dict()
    for user, item in testset:
        test.setdefault(user, []).append(item)
    recommends = uiif.recommends(test.keys(), 20, 30)
    # Accumulate the four evaluation metrics across folds.
    recall += evaluation.recall(recommends, test)
    precision += evaluation.precision(recommends, test)
    coverage += evaluation.coverage(recommends, trainset)
    popularity += evaluation.popularity(recommends, trainset)
# Average the metrics over the 8 experiments.
average_recall = recall / 8.0
average_precision = precision / 8.0
average_coverage = coverage / 8.0
average_popularity = popularity / 8.0
print("useriif性能:")
print(average_recall)
print(average_precision)
print(average_coverage)
print(average_popularity)
# itemcf: split the data into 8 folds and run 8 experiments with different
# fold indices to avoid overfitting a single split.
recall = 0
precision = 0
coverage = 0
popularity = 0
for k in range(0, 8):
    # Build train/test splits of (uid, iid) pairs for this fold.
    # BUGFIX: was read_rating_data(k=0), which reused fold 0 for every
    # iteration; pass the loop index so each experiment uses its own fold,
    # matching the usercf section above.
    trainset, testset = user_book_reader.read_rating_data(k=k)
    print("训练集数量:" + str(len(trainset)))
    print("测试集数量:" + str(len(testset)))
    # Train the item-based CF model on this fold's training data.
    icf = itemcf.ItemCF(trainset)
    icf.train()
    # Group the held-out items by user: {uid: [iid, ...]}.
    test = dict()
    for user, item in testset:
        test.setdefault(user, []).append(item)
    recommends = icf.recommends(test.keys(), 20, 30)
    # Accumulate the four evaluation metrics across folds.
    recall += evaluation.recall(recommends, test)
    precision += evaluation.precision(recommends, test)
    coverage += evaluation.coverage(recommends, trainset)
    popularity += evaluation.popularity(recommends, trainset)
# Average the metrics over the 8 experiments.
average_recall = recall / 8.0
average_precision = precision / 8.0
average_coverage = coverage / 8.0
average_popularity = popularity / 8.0
print("itemcf性能:")
print(average_recall)
print(average_precision)
print(average_coverage)
print(average_popularity)
# itemiuf: split the data into 8 folds and run 8 experiments with different
# fold indices to avoid overfitting a single split.
recall = 0
precision = 0
coverage = 0
popularity = 0
for k in range(0, 8):
    # Build train/test splits of (uid, iid) pairs for this fold.
    # BUGFIX: was read_rating_data(k=0), which reused fold 0 for every
    # iteration; pass the loop index so each experiment uses its own fold,
    # matching the usercf section above.
    trainset, testset = user_book_reader.read_rating_data(k=k)
    print("训练集数量:" + str(len(trainset)))
    print("测试集数量:" + str(len(testset)))
    # Train the ItemIUF model on this fold's training data.
    iiuf = itemiuf.ItemIUF(trainset)
    iiuf.train()
    # Group the held-out items by user: {uid: [iid, ...]}.
    test = dict()
    for user, item in testset:
        test.setdefault(user, []).append(item)
    recommends = iiuf.recommends(test.keys(), 20, 30)
    # Accumulate the four evaluation metrics, printing the running totals.
    recall += evaluation.recall(recommends, test)
    print(" 召回率 " + str(recall))
    precision += evaluation.precision(recommends, test)
    print(" 准确率 " + str(precision))
    coverage += evaluation.coverage(recommends, trainset)
    print(" 覆盖率 " + str(coverage))
    popularity += evaluation.popularity(recommends, trainset)
    print(" 新颖度 " + str(popularity))
# Average the metrics over the 8 experiments.
average_recall = recall / 8.0
average_precision = precision / 8.0
average_coverage = coverage / 8.0
average_popularity = popularity / 8.0
print("itemiuf性能:")
print(average_recall)
print(average_precision)
print(average_coverage)
print(average_popularity)
| 19.369565
| 58
| 0.732211
| 585
| 4,455
| 5.490598
| 0.133333
| 0.009963
| 0.033624
| 0.038605
| 0.857721
| 0.857721
| 0.857721
| 0.810399
| 0.810399
| 0.810399
| 0
| 0.026781
| 0.1367
| 4,455
| 229
| 59
| 19.454148
| 0.808372
| 0.102581
| 0
| 0.772358
| 0
| 0
| 0.027041
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.056911
| 0
| 0.056911
| 0.260163
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4d6b6fee9d7efdf9f0d439e109cae2b0be5853e8
| 9,945
|
py
|
Python
|
authors/apps/articles/test/test_like_dislike_articles.py
|
andela/ah-leagueOfLegends
|
ebe3a4621a5baf36a9345d4b126ba73dc37acd1f
|
[
"BSD-3-Clause"
] | null | null | null |
authors/apps/articles/test/test_like_dislike_articles.py
|
andela/ah-leagueOfLegends
|
ebe3a4621a5baf36a9345d4b126ba73dc37acd1f
|
[
"BSD-3-Clause"
] | 43
|
2018-08-27T16:53:58.000Z
|
2022-03-11T23:28:24.000Z
|
authors/apps/articles/test/test_like_dislike_articles.py
|
andela/ah-leagueOfLegends
|
ebe3a4621a5baf36a9345d4b126ba73dc37acd1f
|
[
"BSD-3-Clause"
] | 2
|
2018-10-30T10:30:35.000Z
|
2018-11-12T07:48:02.000Z
|
from .test_articles import ArticleTestCase
from django.urls import reverse
from rest_framework.views import status
class DislikeLikeArticleTestCase(ArticleTestCase):
"""Tests Like and Dislike articles views"""
article = {
"article":
{
"author": "jake",
"body": "It takes a Jacobian",
"tagList": [
"dragons",
"training"
],
"created_at_date": "2018-09-11T19:56:22.112185+00:00",
"description": "Ever wonder how?",
"slug": "how-to-train-your-dragon",
"title": "How to train your dragon",
"updated_at_date": "2018-09-11T19:56:22.112220+00:00",
"like": 0,
"dislike": 0
}
}
def test_if_user_can_like_without_authentication(self):
"""Test if user can like article without authentication"""
# Like an article
response = self.client.put(path='/api/articles/how-to-train-your-dragon/like/')
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(response.data['detail'], "Authentication credentials were not provided.")
def test_if_user_can_dislike_without_authentication(self):
"""Test if user can dislike article without authentication"""
# Dislike an article
response = self.client.put(path='/api/articles/how-to-train-your-dragon/dislike/')
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(response.data['detail'], "Authentication credentials were not provided.")
def test_if_user_can_like_unexisting_article(self):
"""Test if the user can like an article that does not exist"""
# Register user
self.register_user()
# Login user
res = self.client.post(
reverse('authentication:user_login'),
self.user_cred,
format='json')
# Create token
token = res.data['token']
# Like an article
response = self.client.put(path='/api/articles/how-to-train-your-dragon/like/',
HTTP_AUTHORIZATION='Bearer ' + token)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
self.assertEqual(response.data['message'], "The article does not exist.")
def test_if_user_can_dislike_unexisting_article(self):
"""Test if the user can like an article that does not exist"""
# Register user
self.register_user()
# Login user
res = self.client.post(
reverse('authentication:user_login'),
self.user_cred,
format='json')
# Create token
token = res.data['token']
# Dislike an article
response = self.client.put(path='/api/articles/how-to-train-your-dragon/dislike/',
HTTP_AUTHORIZATION='Bearer ' + token)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
self.assertEqual(response.data['message'], "The article does not exist.")
def test_if_user_liking_is_successful(self):
"""Test if user liking is successful, if like does not exist"""
# Register user
self.register_user()
# Login user
res = self.client.post(
reverse('authentication:user_login'),
self.user_cred,
format='json')
# Create token
token = res.data['token']
# Create article
response = self.create_article(token, self.article)
self.assertEquals(status.HTTP_201_CREATED, response.status_code)
# Like an article
response = self.client.put(path='/api/articles/how-to-train-your-dragon/like/',
HTTP_AUTHORIZATION='Bearer ' + token)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['message'], "Added to Liked articles")
def test_successful_article_disliking(self):
"""Test a successful disliking of an article"""
# Register user
self.register_user()
# Login user
res = self.client.post(
reverse('authentication:user_login'),
self.user_cred,
format='json')
# Create token
token = res.data['token']
# Create article
response = self.create_article(token, self.article)
self.assertEquals(status.HTTP_201_CREATED, response.status_code)
# Dislike an article
response = self.client.put(path='/api/articles/how-to-train-your-dragon/dislike/',
HTTP_AUTHORIZATION='Bearer ' + token)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['message'], "You Dislike this Article")
def test_response_of_adding_a_like_after_adding_a_dislike(self):
"""Test the response of adding a like after adding a dislike"""
# Register user
self.register_user()
# Login user
res = self.client.post(
reverse('authentication:user_login'),
self.user_cred,
format='json')
# Create token
token = res.data['token']
# Create article
response = self.create_article(token, self.article)
self.assertEquals(status.HTTP_201_CREATED, response.status_code)
# Dislike an article
response = self.client.put(path='/api/articles/how-to-train-your-dragon/dislike/',
HTTP_AUTHORIZATION='Bearer ' + token)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['message'], "You Dislike this Article")
# Like an article
response = self.client.put(path='/api/articles/how-to-train-your-dragon/like/',
HTTP_AUTHORIZATION='Bearer ' + token)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['message'], "Removed from dislike and Added to Liked articles")
def test_response_of_adding_a_dislike_after_adding_a_like(self):
"""Test the response of adding a dislike after adding a like """
# Register user
self.register_user()
# Login user
res = self.client.post(
reverse('authentication:user_login'),
self.user_cred,
format='json')
# Create token
token = res.data['token']
# Create article
response = self.create_article(token, self.article)
self.assertEquals(status.HTTP_201_CREATED, response.status_code)
# Like an article
response = self.client.put(path='/api/articles/how-to-train-your-dragon/like/',
HTTP_AUTHORIZATION='Bearer ' + token)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['message'], "Added to Liked articles")
# Dislike an article
response = self.client.put(path='/api/articles/how-to-train-your-dragon/dislike/',
HTTP_AUTHORIZATION='Bearer ' + token)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['message'], "Removed from Liked Articles and Added to Disliked articles")
def test_response_of_double_liking(self):
"""Test the response of liking an article twice"""
# Register user
self.register_user()
# Login user
res = self.client.post(
reverse('authentication:user_login'),
self.user_cred,
format='json')
# Create token
token = res.data['token']
# Create article
response = self.create_article(token, self.article)
self.assertEquals(status.HTTP_201_CREATED, response.status_code)
# Like an article, first request
response = self.client.put(path='/api/articles/how-to-train-your-dragon/like/',
HTTP_AUTHORIZATION='Bearer ' + token)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['message'], "Added to Liked articles")
# Like an article, second request
response = self.client.put(path='/api/articles/how-to-train-your-dragon/like/',
HTTP_AUTHORIZATION='Bearer ' + token)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['message'], "You no longer like this article")
def test_response_of_double_disliking(self):
    """Disliking an article twice toggles the dislike off.

    The first PUT to the dislike endpoint adds the dislike; a second,
    identical PUT must remove it, and the response message must
    confirm the removal.
    """
    # Register and log in to obtain a JWT for authenticated requests.
    self.register_user()
    res = self.client.post(
        reverse('authentication:user_login'),
        self.user_cred,
        format='json')
    token = res.data['token']
    # Create the article that will be disliked twice.
    response = self.create_article(token, self.article)
    # assertEqual replaces the deprecated assertEquals alias
    # (removed in Python 3.12).
    self.assertEqual(status.HTTP_201_CREATED, response.status_code)
    # First dislike request: the dislike is added.
    response = self.client.put(
        path='/api/articles/how-to-train-your-dragon/dislike/',
        HTTP_AUTHORIZATION='Bearer ' + token)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    self.assertEqual(response.data['message'], "You Dislike this Article")
    # Second dislike request: the dislike is toggled off.
    response = self.client.put(
        path='/api/articles/how-to-train-your-dragon/dislike/',
        HTTP_AUTHORIZATION='Bearer ' + token)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    self.assertEqual(response.data['message'], "You no longer dislike this article")
| 45.410959
| 112
| 0.619708
| 1,130
| 9,945
| 5.30531
| 0.10531
| 0.070058
| 0.107423
| 0.037364
| 0.907089
| 0.885405
| 0.859383
| 0.83603
| 0.83603
| 0.809341
| 0
| 0.01525
| 0.274711
| 9,945
| 218
| 113
| 45.619266
| 0.815888
| 0.127903
| 0
| 0.739726
| 0
| 0
| 0.20947
| 0.107884
| 0
| 0
| 0
| 0
| 0.232877
| 1
| 0.068493
| false
| 0
| 0.020548
| 0
| 0.10274
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4d7c8d581e3552b82823766149017f68b12e31d8
| 117,098
|
py
|
Python
|
kubernetes/test/test_com_coreos_monitoring_v1_prometheus_list.py
|
mariusgheorghies/python
|
68ac7e168963d8b5a81dc493b1973d29e903a15b
|
[
"Apache-2.0"
] | null | null | null |
kubernetes/test/test_com_coreos_monitoring_v1_prometheus_list.py
|
mariusgheorghies/python
|
68ac7e168963d8b5a81dc493b1973d29e903a15b
|
[
"Apache-2.0"
] | null | null | null |
kubernetes/test/test_com_coreos_monitoring_v1_prometheus_list.py
|
mariusgheorghies/python
|
68ac7e168963d8b5a81dc493b1973d29e903a15b
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v1.20.7
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import kubernetes.client
from kubernetes.client.models.com_coreos_monitoring_v1_prometheus_list import ComCoreosMonitoringV1PrometheusList # noqa: E501
from kubernetes.client.rest import ApiException
class TestComCoreosMonitoringV1PrometheusList(unittest.TestCase):
"""ComCoreosMonitoringV1PrometheusList unit test stubs"""
def setUp(self):
    """No per-test fixtures are needed for these generated stubs."""
    pass
def tearDown(self):
    """Nothing to clean up; setUp allocates no resources."""
    pass
def make_instance(self, include_optional):
"""Test ComCoreosMonitoringV1PrometheusList
include_option is a boolean, when False only required
params are included, when True both required and
optional params are included """
# model = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_list.ComCoreosMonitoringV1PrometheusList() # noqa: E501
if include_optional :
return ComCoreosMonitoringV1PrometheusList(
api_version = '0',
items = [
kubernetes.client.models.com/coreos/monitoring/v1/prometheus.com.coreos.monitoring.v1.Prometheus(
api_version = '0',
kind = '0',
metadata = kubernetes.client.models.v1/object_meta_v2.v1.ObjectMeta_v2(
annotations = {
'key' : '0'
},
cluster_name = '0',
creation_timestamp = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
deletion_grace_period_seconds = 56,
deletion_timestamp = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
finalizers = [
'0'
],
generate_name = '0',
generation = 56,
labels = {
'key' : '0'
},
managed_fields = [
kubernetes.client.models.v1/managed_fields_entry.v1.ManagedFieldsEntry(
api_version = '0',
fields_type = '0',
fields_v1 = kubernetes.client.models.fields_v1.fieldsV1(),
manager = '0',
operation = '0',
time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), )
],
name = '0',
namespace = '0',
owner_references = [
kubernetes.client.models.v1/owner_reference_v2.v1.OwnerReference_v2(
api_version = '0',
block_owner_deletion = True,
controller = True,
kind = '0',
name = '0',
uid = '0', )
],
resource_version = '0',
self_link = '0',
uid = '0', ),
spec = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec.com_coreos_monitoring_v1_Prometheus_spec(
additional_alert_manager_configs = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_additional_alert_manager_configs.com_coreos_monitoring_v1_Prometheus_spec_additionalAlertManagerConfigs(
key = '0',
name = '0',
optional = True, ),
additional_alert_relabel_configs = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_additional_alert_relabel_configs.com_coreos_monitoring_v1_Prometheus_spec_additionalAlertRelabelConfigs(
key = '0',
name = '0',
optional = True, ),
additional_scrape_configs = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_additional_scrape_configs.com_coreos_monitoring_v1_Prometheus_spec_additionalScrapeConfigs(
key = '0',
name = '0',
optional = True, ),
affinity = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_affinity.com_coreos_monitoring_v1_Alertmanager_spec_affinity(
node_affinity = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_affinity_node_affinity.com_coreos_monitoring_v1_Alertmanager_spec_affinity_nodeAffinity(
preferred_during_scheduling_ignored_during_execution = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_affinity_node_affinity_preferred_during_scheduling_ignored_during_execution.com_coreos_monitoring_v1_Alertmanager_spec_affinity_nodeAffinity_preferredDuringSchedulingIgnoredDuringExecution(
preference = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_affinity_node_affinity_preference.com_coreos_monitoring_v1_Alertmanager_spec_affinity_nodeAffinity_preference(
match_expressions = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_affinity_node_affinity_preference_match_expressions.com_coreos_monitoring_v1_Alertmanager_spec_affinity_nodeAffinity_preference_matchExpressions(
key = '0',
operator = '0',
values = [
'0'
], )
],
match_fields = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_affinity_node_affinity_preference_match_expressions.com_coreos_monitoring_v1_Alertmanager_spec_affinity_nodeAffinity_preference_matchExpressions(
key = '0',
operator = '0', )
], ),
weight = 56, )
],
required_during_scheduling_ignored_during_execution = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_affinity_node_affinity_required_during_scheduling_ignored_during_execution.com_coreos_monitoring_v1_Alertmanager_spec_affinity_nodeAffinity_requiredDuringSchedulingIgnoredDuringExecution(
node_selector_terms = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_affinity_node_affinity_required_during_scheduling_ignored_during_execution_node_selector_terms.com_coreos_monitoring_v1_Alertmanager_spec_affinity_nodeAffinity_requiredDuringSchedulingIgnoredDuringExecution_nodeSelectorTerms()
], ), ),
pod_affinity = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_affinity_pod_affinity.com_coreos_monitoring_v1_Alertmanager_spec_affinity_podAffinity(),
pod_anti_affinity = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_affinity_pod_anti_affinity.com_coreos_monitoring_v1_Alertmanager_spec_affinity_podAntiAffinity(), ),
alerting = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_alerting.com_coreos_monitoring_v1_Prometheus_spec_alerting(
alertmanagers = [
kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_alerting_alertmanagers.com_coreos_monitoring_v1_Prometheus_spec_alerting_alertmanagers(
api_version = '0',
authorization = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_alerting_authorization.com_coreos_monitoring_v1_Prometheus_spec_alerting_authorization(
credentials = kubernetes.client.models.com_coreos_monitoring_v1_pod_monitor_spec_authorization_credentials.com_coreos_monitoring_v1_PodMonitor_spec_authorization_credentials(
key = '0',
name = '0',
optional = True, ),
type = '0', ),
bearer_token_file = '0',
name = '0',
namespace = '0',
path_prefix = '0',
port = kubernetes.client.models.port.port(),
scheme = '0',
timeout = '0',
tls_config = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_alerting_tls_config.com_coreos_monitoring_v1_Prometheus_spec_alerting_tlsConfig(
ca = kubernetes.client.models.com_coreos_monitoring_v1_pod_monitor_spec_tls_config_ca.com_coreos_monitoring_v1_PodMonitor_spec_tlsConfig_ca(
config_map = kubernetes.client.models.com_coreos_monitoring_v1_pod_monitor_spec_oauth2_client_id_config_map.com_coreos_monitoring_v1_PodMonitor_spec_oauth2_clientId_configMap(
key = '0',
name = '0',
optional = True, ),
secret = kubernetes.client.models.com_coreos_monitoring_v1_pod_monitor_spec_oauth2_client_id_secret.com_coreos_monitoring_v1_PodMonitor_spec_oauth2_clientId_secret(
key = '0',
name = '0',
optional = True, ), ),
ca_file = '0',
cert = kubernetes.client.models.com_coreos_monitoring_v1_pod_monitor_spec_tls_config_cert.com_coreos_monitoring_v1_PodMonitor_spec_tlsConfig_cert(),
cert_file = '0',
insecure_skip_verify = True,
key_file = '0',
key_secret = kubernetes.client.models.com_coreos_monitoring_v1_pod_monitor_spec_tls_config_key_secret.com_coreos_monitoring_v1_PodMonitor_spec_tlsConfig_keySecret(
key = '0',
name = '0',
optional = True, ),
server_name = '0', ), )
], ),
allow_overlapping_blocks = True,
apiserver_config = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_apiserver_config.com_coreos_monitoring_v1_Prometheus_spec_apiserverConfig(
basic_auth = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_apiserver_config_basic_auth.com_coreos_monitoring_v1_Prometheus_spec_apiserverConfig_basicAuth(
password = kubernetes.client.models.com_coreos_monitoring_v1_pod_monitor_spec_basic_auth_password.com_coreos_monitoring_v1_PodMonitor_spec_basicAuth_password(
key = '0',
name = '0',
optional = True, ),
username = kubernetes.client.models.com_coreos_monitoring_v1_pod_monitor_spec_basic_auth_username.com_coreos_monitoring_v1_PodMonitor_spec_basicAuth_username(
key = '0',
name = '0',
optional = True, ), ),
bearer_token = '0',
bearer_token_file = '0',
host = '0', ),
arbitrary_fs_access_through_s_ms = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_arbitrary_fs_access_through_s_ms.com_coreos_monitoring_v1_Prometheus_spec_arbitraryFSAccessThroughSMs(
deny = True, ),
base_image = '0',
config_maps = [
'0'
],
containers = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_containers.com_coreos_monitoring_v1_Alertmanager_spec_containers(
args = [
'0'
],
command = [
'0'
],
env = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_env.com_coreos_monitoring_v1_Alertmanager_spec_env(
name = '0',
value = '0',
value_from = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_value_from.com_coreos_monitoring_v1_Alertmanager_spec_valueFrom(
config_map_key_ref = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_value_from_config_map_key_ref.com_coreos_monitoring_v1_Alertmanager_spec_valueFrom_configMapKeyRef(
key = '0',
name = '0',
optional = True, ),
field_ref = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_value_from_field_ref.com_coreos_monitoring_v1_Alertmanager_spec_valueFrom_fieldRef(
api_version = '0',
field_path = '0', ),
resource_field_ref = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_value_from_resource_field_ref.com_coreos_monitoring_v1_Alertmanager_spec_valueFrom_resourceFieldRef(
container_name = '0',
divisor = kubernetes.client.models.divisor.divisor(),
resource = '0', ),
secret_key_ref = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_value_from_secret_key_ref.com_coreos_monitoring_v1_Alertmanager_spec_valueFrom_secretKeyRef(
key = '0',
name = '0',
optional = True, ), ), )
],
env_from = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_env_from.com_coreos_monitoring_v1_Alertmanager_spec_envFrom(
config_map_ref = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_config_map_ref.com_coreos_monitoring_v1_Alertmanager_spec_configMapRef(
name = '0',
optional = True, ),
prefix = '0',
secret_ref = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_secret_ref.com_coreos_monitoring_v1_Alertmanager_spec_secretRef(
name = '0',
optional = True, ), )
],
image = '0',
image_pull_policy = '0',
lifecycle = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_lifecycle.com_coreos_monitoring_v1_Alertmanager_spec_lifecycle(
post_start = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_lifecycle_post_start.com_coreos_monitoring_v1_Alertmanager_spec_lifecycle_postStart(
exec = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_lifecycle_post_start_exec.com_coreos_monitoring_v1_Alertmanager_spec_lifecycle_postStart_exec(),
http_get = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_lifecycle_post_start_http_get.com_coreos_monitoring_v1_Alertmanager_spec_lifecycle_postStart_httpGet(
host = '0',
http_headers = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_lifecycle_post_start_http_get_http_headers.com_coreos_monitoring_v1_Alertmanager_spec_lifecycle_postStart_httpGet_httpHeaders(
name = '0',
value = '0', )
],
path = '0',
port = kubernetes.client.models.port.port(),
scheme = '0', ),
tcp_socket = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_lifecycle_post_start_tcp_socket.com_coreos_monitoring_v1_Alertmanager_spec_lifecycle_postStart_tcpSocket(
host = '0',
port = kubernetes.client.models.port.port(), ), ),
pre_stop = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_lifecycle_pre_stop.com_coreos_monitoring_v1_Alertmanager_spec_lifecycle_preStop(), ),
liveness_probe = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_liveness_probe.com_coreos_monitoring_v1_Alertmanager_spec_livenessProbe(
failure_threshold = 56,
initial_delay_seconds = 56,
period_seconds = 56,
success_threshold = 56,
timeout_seconds = 56, ),
name = '0',
ports = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_ports.com_coreos_monitoring_v1_Alertmanager_spec_ports(
container_port = 56,
host_ip = '0',
host_port = 56,
name = '0',
protocol = '0', )
],
readiness_probe = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_readiness_probe.com_coreos_monitoring_v1_Alertmanager_spec_readinessProbe(
failure_threshold = 56,
initial_delay_seconds = 56,
period_seconds = 56,
success_threshold = 56,
timeout_seconds = 56, ),
resources = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_resources.com_coreos_monitoring_v1_Alertmanager_spec_resources(
limits = {
'key' : None
},
requests = {
'key' : None
}, ),
security_context = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_security_context.com_coreos_monitoring_v1_Alertmanager_spec_securityContext(
allow_privilege_escalation = True,
capabilities = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_security_context_capabilities.com_coreos_monitoring_v1_Alertmanager_spec_securityContext_capabilities(
add = [
'0'
],
drop = [
'0'
], ),
privileged = True,
proc_mount = '0',
read_only_root_filesystem = True,
run_as_group = 56,
run_as_non_root = True,
run_as_user = 56,
se_linux_options = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_security_context_se_linux_options.com_coreos_monitoring_v1_Alertmanager_spec_securityContext_seLinuxOptions(
level = '0',
role = '0',
type = '0',
user = '0', ),
windows_options = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_security_context_windows_options.com_coreos_monitoring_v1_Alertmanager_spec_securityContext_windowsOptions(
gmsa_credential_spec = '0',
gmsa_credential_spec_name = '0',
run_as_user_name = '0', ), ),
startup_probe = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_startup_probe.com_coreos_monitoring_v1_Alertmanager_spec_startupProbe(
failure_threshold = 56,
initial_delay_seconds = 56,
period_seconds = 56,
success_threshold = 56,
timeout_seconds = 56, ),
stdin = True,
stdin_once = True,
termination_message_path = '0',
termination_message_policy = '0',
tty = True,
volume_devices = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_volume_devices.com_coreos_monitoring_v1_Alertmanager_spec_volumeDevices(
device_path = '0',
name = '0', )
],
volume_mounts = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_volume_mounts.com_coreos_monitoring_v1_Alertmanager_spec_volumeMounts(
mount_path = '0',
mount_propagation = '0',
name = '0',
read_only = True,
sub_path = '0',
sub_path_expr = '0', )
],
working_dir = '0', )
],
disable_compaction = True,
enable_admin_api = True,
enable_features = [
'0'
],
enforced_label_limit = 56,
enforced_label_name_length_limit = 56,
enforced_label_value_length_limit = 56,
enforced_namespace_label = '0',
enforced_sample_limit = 56,
enforced_target_limit = 56,
evaluation_interval = '0',
external_labels = {
'key' : '0'
},
external_url = '0',
ignore_namespace_selectors = True,
image = '0',
image_pull_secrets = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_image_pull_secrets.com_coreos_monitoring_v1_Alertmanager_spec_imagePullSecrets(
name = '0', )
],
init_containers = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_containers.com_coreos_monitoring_v1_Alertmanager_spec_containers(
image = '0',
image_pull_policy = '0',
name = '0',
stdin = True,
stdin_once = True,
termination_message_path = '0',
termination_message_policy = '0',
tty = True,
working_dir = '0', )
],
listen_local = True,
log_format = '0',
log_level = '0',
node_selector = {
'key' : '0'
},
override_honor_labels = True,
override_honor_timestamps = True,
paused = True,
pod_metadata = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_pod_metadata.com_coreos_monitoring_v1_Prometheus_spec_podMetadata(
name = '0', ),
pod_monitor_namespace_selector = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_pod_monitor_namespace_selector.com_coreos_monitoring_v1_Prometheus_spec_podMonitorNamespaceSelector(
match_labels = {
'key' : '0'
}, ),
pod_monitor_selector = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_pod_monitor_selector.com_coreos_monitoring_v1_Prometheus_spec_podMonitorSelector(),
port_name = '0',
priority_class_name = '0',
probe_namespace_selector = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_probe_namespace_selector.com_coreos_monitoring_v1_Prometheus_spec_probeNamespaceSelector(),
probe_selector = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_probe_selector.com_coreos_monitoring_v1_Prometheus_spec_probeSelector(),
prometheus_external_label_name = '0',
prometheus_rules_excluded_from_enforce = [
kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_prometheus_rules_excluded_from_enforce.com_coreos_monitoring_v1_Prometheus_spec_prometheusRulesExcludedFromEnforce(
rule_name = '0',
rule_namespace = '0', )
],
query = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_query.com_coreos_monitoring_v1_Prometheus_spec_query(
lookback_delta = '0',
max_concurrency = 56,
max_samples = 56,
timeout = '0', ),
query_log_file = '0',
remote_read = [
kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_remote_read.com_coreos_monitoring_v1_Prometheus_spec_remoteRead(
bearer_token = '0',
bearer_token_file = '0',
name = '0',
oauth2 = kubernetes.client.models.com_coreos_monitoring_v1_pod_monitor_spec_oauth2.com_coreos_monitoring_v1_PodMonitor_spec_oauth2(
kubernetes.client_id = kubernetes.client.models.com_coreos_monitoring_v1_pod_monitor_spec_oauth2_client_id.com_coreos_monitoring_v1_PodMonitor_spec_oauth2_clientId(),
kubernetes.client_secret = kubernetes.client.models.com_coreos_monitoring_v1_pod_monitor_spec_oauth2_client_secret.com_coreos_monitoring_v1_PodMonitor_spec_oauth2_clientSecret(
key = '0',
name = '0',
optional = True, ),
endpoint_params = {
'key' : '0'
},
scopes = [
'0'
],
token_url = '0', ),
proxy_url = '0',
read_recent = True,
remote_timeout = '0',
required_matchers = {
'key' : '0'
},
url = '0', )
],
remote_write = [
kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_remote_write.com_coreos_monitoring_v1_Prometheus_spec_remoteWrite(
bearer_token = '0',
bearer_token_file = '0',
headers = {
'key' : '0'
},
metadata_config = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_metadata_config.com_coreos_monitoring_v1_Prometheus_spec_metadataConfig(
send = True,
send_interval = '0', ),
name = '0',
proxy_url = '0',
queue_config = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_queue_config.com_coreos_monitoring_v1_Prometheus_spec_queueConfig(
batch_send_deadline = '0',
capacity = 56,
max_backoff = '0',
max_retries = 56,
max_samples_per_send = 56,
max_shards = 56,
min_backoff = '0',
min_shards = 56, ),
remote_timeout = '0',
send_exemplars = True,
url = '0',
write_relabel_configs = [
kubernetes.client.models.com_coreos_monitoring_v1_pod_monitor_spec_metric_relabelings.com_coreos_monitoring_v1_PodMonitor_spec_metricRelabelings(
action = '0',
modulus = 56,
regex = '0',
replacement = '0',
separator = '0',
source_labels = [
'0'
],
target_label = '0', )
], )
],
replica_external_label_name = '0',
replicas = 56,
resources = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_resources_1.com_coreos_monitoring_v1_Alertmanager_spec_resources_1(),
retention = '0',
retention_size = '0',
route_prefix = '0',
rule_namespace_selector = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_rule_namespace_selector.com_coreos_monitoring_v1_Prometheus_spec_ruleNamespaceSelector(),
rule_selector = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_rule_selector.com_coreos_monitoring_v1_Prometheus_spec_ruleSelector(),
rules = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_rules.com_coreos_monitoring_v1_Prometheus_spec_rules(
alert = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_rules_alert.com_coreos_monitoring_v1_Prometheus_spec_rules_alert(
for_grace_period = '0',
for_outage_tolerance = '0',
resend_delay = '0', ), ),
scrape_interval = '0',
scrape_timeout = '0',
secrets = [
'0'
],
security_context = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_security_context_1.com_coreos_monitoring_v1_Alertmanager_spec_securityContext_1(
fs_group = 56,
fs_group_change_policy = '0',
run_as_group = 56,
run_as_non_root = True,
run_as_user = 56,
supplemental_groups = [
56
],
sysctls = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_security_context_1_sysctls.com_coreos_monitoring_v1_Alertmanager_spec_securityContext_1_sysctls(
name = '0',
value = '0', )
], ),
service_account_name = '0',
service_monitor_namespace_selector = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_service_monitor_namespace_selector.com_coreos_monitoring_v1_Prometheus_spec_serviceMonitorNamespaceSelector(),
service_monitor_selector = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_service_monitor_selector.com_coreos_monitoring_v1_Prometheus_spec_serviceMonitorSelector(),
sha = '0',
shards = 56,
storage = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_storage.com_coreos_monitoring_v1_Prometheus_spec_storage(
disable_mount_sub_path = True,
empty_dir = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_storage_empty_dir.com_coreos_monitoring_v1_Alertmanager_spec_storage_emptyDir(
medium = '0',
size_limit = kubernetes.client.models.size_limit.sizeLimit(), ),
volume_claim_template = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_storage_volume_claim_template.com_coreos_monitoring_v1_Alertmanager_spec_storage_volumeClaimTemplate(
api_version = '0',
kind = '0',
status = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_storage_volume_claim_template_status.com_coreos_monitoring_v1_Alertmanager_spec_storage_volumeClaimTemplate_status(
access_modes = [
'0'
],
capacity = {
'key' : None
},
conditions = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_storage_volume_claim_template_status_conditions.com_coreos_monitoring_v1_Alertmanager_spec_storage_volumeClaimTemplate_status_conditions(
last_probe_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
last_transition_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
message = '0',
reason = '0',
status = '0',
type = '0', )
],
phase = '0', ), ), ),
tag = '0',
thanos = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_thanos.com_coreos_monitoring_v1_Prometheus_spec_thanos(
base_image = '0',
grpc_server_tls_config = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_thanos_grpc_server_tls_config.com_coreos_monitoring_v1_Prometheus_spec_thanos_grpcServerTlsConfig(
ca_file = '0',
cert_file = '0',
insecure_skip_verify = True,
key_file = '0',
server_name = '0', ),
image = '0',
listen_local = True,
log_format = '0',
log_level = '0',
min_time = '0',
object_storage_config = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_thanos_object_storage_config.com_coreos_monitoring_v1_Prometheus_spec_thanos_objectStorageConfig(
key = '0',
name = '0',
optional = True, ),
object_storage_config_file = '0',
ready_timeout = '0',
sha = '0',
tag = '0',
tracing_config = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_thanos_tracing_config.com_coreos_monitoring_v1_Prometheus_spec_thanos_tracingConfig(
key = '0',
name = '0',
optional = True, ),
tracing_config_file = '0',
version = '0', ),
tolerations = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_tolerations.com_coreos_monitoring_v1_Alertmanager_spec_tolerations(
effect = '0',
key = '0',
operator = '0',
toleration_seconds = 56,
value = '0', )
],
topology_spread_constraints = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_topology_spread_constraints.com_coreos_monitoring_v1_Alertmanager_spec_topologySpreadConstraints(
label_selector = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_label_selector.com_coreos_monitoring_v1_Alertmanager_spec_labelSelector(),
max_skew = 56,
topology_key = '0',
when_unsatisfiable = '0', )
],
version = '0',
volume_mounts = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_volume_mounts.com_coreos_monitoring_v1_Alertmanager_spec_volumeMounts(
mount_path = '0',
mount_propagation = '0',
name = '0',
read_only = True,
sub_path = '0',
sub_path_expr = '0', )
],
volumes = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_volumes.com_coreos_monitoring_v1_Alertmanager_spec_volumes(
aws_elastic_block_store = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_aws_elastic_block_store.com_coreos_monitoring_v1_Alertmanager_spec_awsElasticBlockStore(
fs_type = '0',
partition = 56,
read_only = True,
volume_id = '0', ),
azure_disk = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_azure_disk.com_coreos_monitoring_v1_Alertmanager_spec_azureDisk(
caching_mode = '0',
disk_name = '0',
disk_uri = '0',
fs_type = '0',
kind = '0',
read_only = True, ),
azure_file = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_azure_file.com_coreos_monitoring_v1_Alertmanager_spec_azureFile(
read_only = True,
secret_name = '0',
share_name = '0', ),
cephfs = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_cephfs.com_coreos_monitoring_v1_Alertmanager_spec_cephfs(
monitors = [
'0'
],
path = '0',
read_only = True,
secret_file = '0',
user = '0', ),
cinder = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_cinder.com_coreos_monitoring_v1_Alertmanager_spec_cinder(
fs_type = '0',
read_only = True,
volume_id = '0', ),
csi = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_csi.com_coreos_monitoring_v1_Alertmanager_spec_csi(
driver = '0',
fs_type = '0',
node_publish_secret_ref = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_csi_node_publish_secret_ref.com_coreos_monitoring_v1_Alertmanager_spec_csi_nodePublishSecretRef(
name = '0', ),
read_only = True,
volume_attributes = {
'key' : '0'
}, ),
downward_api = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_downward_api.com_coreos_monitoring_v1_Alertmanager_spec_downwardAPI(
default_mode = 56,
items = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_downward_api_items.com_coreos_monitoring_v1_Alertmanager_spec_downwardAPI_items(
mode = 56,
path = '0', )
], ),
fc = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_fc.com_coreos_monitoring_v1_Alertmanager_spec_fc(
fs_type = '0',
lun = 56,
read_only = True,
target_ww_ns = [
'0'
],
wwids = [
'0'
], ),
flex_volume = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_flex_volume.com_coreos_monitoring_v1_Alertmanager_spec_flexVolume(
driver = '0',
fs_type = '0',
options = {
'key' : '0'
},
read_only = True, ),
flocker = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_flocker.com_coreos_monitoring_v1_Alertmanager_spec_flocker(
dataset_name = '0',
dataset_uuid = '0', ),
gce_persistent_disk = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_gce_persistent_disk.com_coreos_monitoring_v1_Alertmanager_spec_gcePersistentDisk(
fs_type = '0',
partition = 56,
pd_name = '0',
read_only = True, ),
git_repo = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_git_repo.com_coreos_monitoring_v1_Alertmanager_spec_gitRepo(
directory = '0',
repository = '0',
revision = '0', ),
glusterfs = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_glusterfs.com_coreos_monitoring_v1_Alertmanager_spec_glusterfs(
endpoints = '0',
path = '0',
read_only = True, ),
host_path = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_host_path.com_coreos_monitoring_v1_Alertmanager_spec_hostPath(
path = '0',
type = '0', ),
iscsi = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_iscsi.com_coreos_monitoring_v1_Alertmanager_spec_iscsi(
chap_auth_discovery = True,
chap_auth_session = True,
fs_type = '0',
initiator_name = '0',
iqn = '0',
iscsi_interface = '0',
lun = 56,
portals = [
'0'
],
read_only = True,
target_portal = '0', ),
name = '0',
nfs = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_nfs.com_coreos_monitoring_v1_Alertmanager_spec_nfs(
path = '0',
read_only = True,
server = '0', ),
persistent_volume_claim = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_persistent_volume_claim.com_coreos_monitoring_v1_Alertmanager_spec_persistentVolumeClaim(
claim_name = '0',
read_only = True, ),
photon_persistent_disk = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_photon_persistent_disk.com_coreos_monitoring_v1_Alertmanager_spec_photonPersistentDisk(
fs_type = '0',
pd_id = '0', ),
portworx_volume = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_portworx_volume.com_coreos_monitoring_v1_Alertmanager_spec_portworxVolume(
fs_type = '0',
read_only = True,
volume_id = '0', ),
projected = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_projected.com_coreos_monitoring_v1_Alertmanager_spec_projected(
default_mode = 56,
sources = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_projected_sources.com_coreos_monitoring_v1_Alertmanager_spec_projected_sources(
service_account_token = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_projected_service_account_token.com_coreos_monitoring_v1_Alertmanager_spec_projected_serviceAccountToken(
audience = '0',
expiration_seconds = 56,
path = '0', ), )
], ),
quobyte = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_quobyte.com_coreos_monitoring_v1_Alertmanager_spec_quobyte(
group = '0',
read_only = True,
registry = '0',
tenant = '0',
user = '0',
volume = '0', ),
rbd = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_rbd.com_coreos_monitoring_v1_Alertmanager_spec_rbd(
fs_type = '0',
image = '0',
keyring = '0',
monitors = [
'0'
],
pool = '0',
read_only = True,
user = '0', ),
scale_io = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_scale_io.com_coreos_monitoring_v1_Alertmanager_spec_scaleIO(
fs_type = '0',
gateway = '0',
protection_domain = '0',
read_only = True,
secret_ref = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_scale_io_secret_ref.com_coreos_monitoring_v1_Alertmanager_spec_scaleIO_secretRef(
name = '0', ),
ssl_enabled = True,
storage_mode = '0',
storage_pool = '0',
system = '0',
volume_name = '0', ),
storageos = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_storageos.com_coreos_monitoring_v1_Alertmanager_spec_storageos(
fs_type = '0',
read_only = True,
volume_name = '0',
volume_namespace = '0', ),
vsphere_volume = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_vsphere_volume.com_coreos_monitoring_v1_Alertmanager_spec_vsphereVolume(
fs_type = '0',
storage_policy_id = '0',
storage_policy_name = '0',
volume_path = '0', ), )
],
wal_compression = True,
web = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_web.com_coreos_monitoring_v1_Prometheus_spec_web(
page_title = '0', ), ),
status = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_status.com_coreos_monitoring_v1_Prometheus_status(
available_replicas = 56,
paused = True,
replicas = 56,
unavailable_replicas = 56,
updated_replicas = 56, ), )
],
kind = '0',
metadata = kubernetes.client.models.v1/list_meta.v1.ListMeta(
continue = '0',
remaining_item_count = 56,
resource_version = '0',
self_link = '0', )
)
else :
return ComCoreosMonitoringV1PrometheusList(
items = [
kubernetes.client.models.com/coreos/monitoring/v1/prometheus.com.coreos.monitoring.v1.Prometheus(
api_version = '0',
kind = '0',
metadata = kubernetes.client.models.v1/object_meta_v2.v1.ObjectMeta_v2(
annotations = {
'key' : '0'
},
cluster_name = '0',
creation_timestamp = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
deletion_grace_period_seconds = 56,
deletion_timestamp = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
finalizers = [
'0'
],
generate_name = '0',
generation = 56,
labels = {
'key' : '0'
},
managed_fields = [
kubernetes.client.models.v1/managed_fields_entry.v1.ManagedFieldsEntry(
api_version = '0',
fields_type = '0',
fields_v1 = kubernetes.client.models.fields_v1.fieldsV1(),
manager = '0',
operation = '0',
time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), )
],
name = '0',
namespace = '0',
owner_references = [
kubernetes.client.models.v1/owner_reference_v2.v1.OwnerReference_v2(
api_version = '0',
block_owner_deletion = True,
controller = True,
kind = '0',
name = '0',
uid = '0', )
],
resource_version = '0',
self_link = '0',
uid = '0', ),
spec = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec.com_coreos_monitoring_v1_Prometheus_spec(
additional_alert_manager_configs = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_additional_alert_manager_configs.com_coreos_monitoring_v1_Prometheus_spec_additionalAlertManagerConfigs(
key = '0',
name = '0',
optional = True, ),
additional_alert_relabel_configs = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_additional_alert_relabel_configs.com_coreos_monitoring_v1_Prometheus_spec_additionalAlertRelabelConfigs(
key = '0',
name = '0',
optional = True, ),
additional_scrape_configs = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_additional_scrape_configs.com_coreos_monitoring_v1_Prometheus_spec_additionalScrapeConfigs(
key = '0',
name = '0',
optional = True, ),
affinity = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_affinity.com_coreos_monitoring_v1_Alertmanager_spec_affinity(
node_affinity = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_affinity_node_affinity.com_coreos_monitoring_v1_Alertmanager_spec_affinity_nodeAffinity(
preferred_during_scheduling_ignored_during_execution = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_affinity_node_affinity_preferred_during_scheduling_ignored_during_execution.com_coreos_monitoring_v1_Alertmanager_spec_affinity_nodeAffinity_preferredDuringSchedulingIgnoredDuringExecution(
preference = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_affinity_node_affinity_preference.com_coreos_monitoring_v1_Alertmanager_spec_affinity_nodeAffinity_preference(
match_expressions = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_affinity_node_affinity_preference_match_expressions.com_coreos_monitoring_v1_Alertmanager_spec_affinity_nodeAffinity_preference_matchExpressions(
key = '0',
operator = '0',
values = [
'0'
], )
],
match_fields = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_affinity_node_affinity_preference_match_expressions.com_coreos_monitoring_v1_Alertmanager_spec_affinity_nodeAffinity_preference_matchExpressions(
key = '0',
operator = '0', )
], ),
weight = 56, )
],
required_during_scheduling_ignored_during_execution = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_affinity_node_affinity_required_during_scheduling_ignored_during_execution.com_coreos_monitoring_v1_Alertmanager_spec_affinity_nodeAffinity_requiredDuringSchedulingIgnoredDuringExecution(
node_selector_terms = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_affinity_node_affinity_required_during_scheduling_ignored_during_execution_node_selector_terms.com_coreos_monitoring_v1_Alertmanager_spec_affinity_nodeAffinity_requiredDuringSchedulingIgnoredDuringExecution_nodeSelectorTerms()
], ), ),
pod_affinity = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_affinity_pod_affinity.com_coreos_monitoring_v1_Alertmanager_spec_affinity_podAffinity(),
pod_anti_affinity = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_affinity_pod_anti_affinity.com_coreos_monitoring_v1_Alertmanager_spec_affinity_podAntiAffinity(), ),
alerting = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_alerting.com_coreos_monitoring_v1_Prometheus_spec_alerting(
alertmanagers = [
kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_alerting_alertmanagers.com_coreos_monitoring_v1_Prometheus_spec_alerting_alertmanagers(
api_version = '0',
authorization = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_alerting_authorization.com_coreos_monitoring_v1_Prometheus_spec_alerting_authorization(
credentials = kubernetes.client.models.com_coreos_monitoring_v1_pod_monitor_spec_authorization_credentials.com_coreos_monitoring_v1_PodMonitor_spec_authorization_credentials(
key = '0',
name = '0',
optional = True, ),
type = '0', ),
bearer_token_file = '0',
name = '0',
namespace = '0',
path_prefix = '0',
port = kubernetes.client.models.port.port(),
scheme = '0',
timeout = '0',
tls_config = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_alerting_tls_config.com_coreos_monitoring_v1_Prometheus_spec_alerting_tlsConfig(
ca = kubernetes.client.models.com_coreos_monitoring_v1_pod_monitor_spec_tls_config_ca.com_coreos_monitoring_v1_PodMonitor_spec_tlsConfig_ca(
config_map = kubernetes.client.models.com_coreos_monitoring_v1_pod_monitor_spec_oauth2_client_id_config_map.com_coreos_monitoring_v1_PodMonitor_spec_oauth2_clientId_configMap(
key = '0',
name = '0',
optional = True, ),
secret = kubernetes.client.models.com_coreos_monitoring_v1_pod_monitor_spec_oauth2_client_id_secret.com_coreos_monitoring_v1_PodMonitor_spec_oauth2_clientId_secret(
key = '0',
name = '0',
optional = True, ), ),
ca_file = '0',
cert = kubernetes.client.models.com_coreos_monitoring_v1_pod_monitor_spec_tls_config_cert.com_coreos_monitoring_v1_PodMonitor_spec_tlsConfig_cert(),
cert_file = '0',
insecure_skip_verify = True,
key_file = '0',
key_secret = kubernetes.client.models.com_coreos_monitoring_v1_pod_monitor_spec_tls_config_key_secret.com_coreos_monitoring_v1_PodMonitor_spec_tlsConfig_keySecret(
key = '0',
name = '0',
optional = True, ),
server_name = '0', ), )
], ),
allow_overlapping_blocks = True,
apiserver_config = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_apiserver_config.com_coreos_monitoring_v1_Prometheus_spec_apiserverConfig(
basic_auth = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_apiserver_config_basic_auth.com_coreos_monitoring_v1_Prometheus_spec_apiserverConfig_basicAuth(
password = kubernetes.client.models.com_coreos_monitoring_v1_pod_monitor_spec_basic_auth_password.com_coreos_monitoring_v1_PodMonitor_spec_basicAuth_password(
key = '0',
name = '0',
optional = True, ),
username = kubernetes.client.models.com_coreos_monitoring_v1_pod_monitor_spec_basic_auth_username.com_coreos_monitoring_v1_PodMonitor_spec_basicAuth_username(
key = '0',
name = '0',
optional = True, ), ),
bearer_token = '0',
bearer_token_file = '0',
host = '0', ),
arbitrary_fs_access_through_s_ms = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_arbitrary_fs_access_through_s_ms.com_coreos_monitoring_v1_Prometheus_spec_arbitraryFSAccessThroughSMs(
deny = True, ),
base_image = '0',
config_maps = [
'0'
],
containers = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_containers.com_coreos_monitoring_v1_Alertmanager_spec_containers(
args = [
'0'
],
command = [
'0'
],
env = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_env.com_coreos_monitoring_v1_Alertmanager_spec_env(
name = '0',
value = '0',
value_from = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_value_from.com_coreos_monitoring_v1_Alertmanager_spec_valueFrom(
config_map_key_ref = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_value_from_config_map_key_ref.com_coreos_monitoring_v1_Alertmanager_spec_valueFrom_configMapKeyRef(
key = '0',
name = '0',
optional = True, ),
field_ref = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_value_from_field_ref.com_coreos_monitoring_v1_Alertmanager_spec_valueFrom_fieldRef(
api_version = '0',
field_path = '0', ),
resource_field_ref = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_value_from_resource_field_ref.com_coreos_monitoring_v1_Alertmanager_spec_valueFrom_resourceFieldRef(
container_name = '0',
divisor = kubernetes.client.models.divisor.divisor(),
resource = '0', ),
secret_key_ref = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_value_from_secret_key_ref.com_coreos_monitoring_v1_Alertmanager_spec_valueFrom_secretKeyRef(
key = '0',
name = '0',
optional = True, ), ), )
],
env_from = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_env_from.com_coreos_monitoring_v1_Alertmanager_spec_envFrom(
config_map_ref = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_config_map_ref.com_coreos_monitoring_v1_Alertmanager_spec_configMapRef(
name = '0',
optional = True, ),
prefix = '0',
secret_ref = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_secret_ref.com_coreos_monitoring_v1_Alertmanager_spec_secretRef(
name = '0',
optional = True, ), )
],
image = '0',
image_pull_policy = '0',
lifecycle = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_lifecycle.com_coreos_monitoring_v1_Alertmanager_spec_lifecycle(
post_start = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_lifecycle_post_start.com_coreos_monitoring_v1_Alertmanager_spec_lifecycle_postStart(
exec = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_lifecycle_post_start_exec.com_coreos_monitoring_v1_Alertmanager_spec_lifecycle_postStart_exec(),
http_get = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_lifecycle_post_start_http_get.com_coreos_monitoring_v1_Alertmanager_spec_lifecycle_postStart_httpGet(
host = '0',
http_headers = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_lifecycle_post_start_http_get_http_headers.com_coreos_monitoring_v1_Alertmanager_spec_lifecycle_postStart_httpGet_httpHeaders(
name = '0',
value = '0', )
],
path = '0',
port = kubernetes.client.models.port.port(),
scheme = '0', ),
tcp_socket = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_lifecycle_post_start_tcp_socket.com_coreos_monitoring_v1_Alertmanager_spec_lifecycle_postStart_tcpSocket(
host = '0',
port = kubernetes.client.models.port.port(), ), ),
pre_stop = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_lifecycle_pre_stop.com_coreos_monitoring_v1_Alertmanager_spec_lifecycle_preStop(), ),
liveness_probe = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_liveness_probe.com_coreos_monitoring_v1_Alertmanager_spec_livenessProbe(
failure_threshold = 56,
initial_delay_seconds = 56,
period_seconds = 56,
success_threshold = 56,
timeout_seconds = 56, ),
name = '0',
ports = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_ports.com_coreos_monitoring_v1_Alertmanager_spec_ports(
container_port = 56,
host_ip = '0',
host_port = 56,
name = '0',
protocol = '0', )
],
readiness_probe = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_readiness_probe.com_coreos_monitoring_v1_Alertmanager_spec_readinessProbe(
failure_threshold = 56,
initial_delay_seconds = 56,
period_seconds = 56,
success_threshold = 56,
timeout_seconds = 56, ),
resources = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_resources.com_coreos_monitoring_v1_Alertmanager_spec_resources(
limits = {
'key' : None
},
requests = {
'key' : None
}, ),
security_context = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_security_context.com_coreos_monitoring_v1_Alertmanager_spec_securityContext(
allow_privilege_escalation = True,
capabilities = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_security_context_capabilities.com_coreos_monitoring_v1_Alertmanager_spec_securityContext_capabilities(
add = [
'0'
],
drop = [
'0'
], ),
privileged = True,
proc_mount = '0',
read_only_root_filesystem = True,
run_as_group = 56,
run_as_non_root = True,
run_as_user = 56,
se_linux_options = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_security_context_se_linux_options.com_coreos_monitoring_v1_Alertmanager_spec_securityContext_seLinuxOptions(
level = '0',
role = '0',
type = '0',
user = '0', ),
windows_options = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_security_context_windows_options.com_coreos_monitoring_v1_Alertmanager_spec_securityContext_windowsOptions(
gmsa_credential_spec = '0',
gmsa_credential_spec_name = '0',
run_as_user_name = '0', ), ),
startup_probe = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_startup_probe.com_coreos_monitoring_v1_Alertmanager_spec_startupProbe(
failure_threshold = 56,
initial_delay_seconds = 56,
period_seconds = 56,
success_threshold = 56,
timeout_seconds = 56, ),
stdin = True,
stdin_once = True,
termination_message_path = '0',
termination_message_policy = '0',
tty = True,
volume_devices = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_volume_devices.com_coreos_monitoring_v1_Alertmanager_spec_volumeDevices(
device_path = '0',
name = '0', )
],
volume_mounts = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_volume_mounts.com_coreos_monitoring_v1_Alertmanager_spec_volumeMounts(
mount_path = '0',
mount_propagation = '0',
name = '0',
read_only = True,
sub_path = '0',
sub_path_expr = '0', )
],
working_dir = '0', )
],
disable_compaction = True,
enable_admin_api = True,
enable_features = [
'0'
],
enforced_label_limit = 56,
enforced_label_name_length_limit = 56,
enforced_label_value_length_limit = 56,
enforced_namespace_label = '0',
enforced_sample_limit = 56,
enforced_target_limit = 56,
evaluation_interval = '0',
external_labels = {
'key' : '0'
},
external_url = '0',
ignore_namespace_selectors = True,
image = '0',
image_pull_secrets = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_image_pull_secrets.com_coreos_monitoring_v1_Alertmanager_spec_imagePullSecrets(
name = '0', )
],
init_containers = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_containers.com_coreos_monitoring_v1_Alertmanager_spec_containers(
image = '0',
image_pull_policy = '0',
name = '0',
stdin = True,
stdin_once = True,
termination_message_path = '0',
termination_message_policy = '0',
tty = True,
working_dir = '0', )
],
listen_local = True,
log_format = '0',
log_level = '0',
node_selector = {
'key' : '0'
},
override_honor_labels = True,
override_honor_timestamps = True,
paused = True,
pod_metadata = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_pod_metadata.com_coreos_monitoring_v1_Prometheus_spec_podMetadata(
name = '0', ),
pod_monitor_namespace_selector = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_pod_monitor_namespace_selector.com_coreos_monitoring_v1_Prometheus_spec_podMonitorNamespaceSelector(
match_labels = {
'key' : '0'
}, ),
pod_monitor_selector = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_pod_monitor_selector.com_coreos_monitoring_v1_Prometheus_spec_podMonitorSelector(),
port_name = '0',
priority_class_name = '0',
probe_namespace_selector = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_probe_namespace_selector.com_coreos_monitoring_v1_Prometheus_spec_probeNamespaceSelector(),
probe_selector = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_probe_selector.com_coreos_monitoring_v1_Prometheus_spec_probeSelector(),
prometheus_external_label_name = '0',
prometheus_rules_excluded_from_enforce = [
kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_prometheus_rules_excluded_from_enforce.com_coreos_monitoring_v1_Prometheus_spec_prometheusRulesExcludedFromEnforce(
rule_name = '0',
rule_namespace = '0', )
],
query = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_query.com_coreos_monitoring_v1_Prometheus_spec_query(
lookback_delta = '0',
max_concurrency = 56,
max_samples = 56,
timeout = '0', ),
query_log_file = '0',
remote_read = [
kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_remote_read.com_coreos_monitoring_v1_Prometheus_spec_remoteRead(
bearer_token = '0',
bearer_token_file = '0',
name = '0',
oauth2 = kubernetes.client.models.com_coreos_monitoring_v1_pod_monitor_spec_oauth2.com_coreos_monitoring_v1_PodMonitor_spec_oauth2(
kubernetes.client_id = kubernetes.client.models.com_coreos_monitoring_v1_pod_monitor_spec_oauth2_client_id.com_coreos_monitoring_v1_PodMonitor_spec_oauth2_clientId(),
kubernetes.client_secret = kubernetes.client.models.com_coreos_monitoring_v1_pod_monitor_spec_oauth2_client_secret.com_coreos_monitoring_v1_PodMonitor_spec_oauth2_clientSecret(
key = '0',
name = '0',
optional = True, ),
endpoint_params = {
'key' : '0'
},
scopes = [
'0'
],
token_url = '0', ),
proxy_url = '0',
read_recent = True,
remote_timeout = '0',
required_matchers = {
'key' : '0'
},
url = '0', )
],
remote_write = [
kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_remote_write.com_coreos_monitoring_v1_Prometheus_spec_remoteWrite(
bearer_token = '0',
bearer_token_file = '0',
headers = {
'key' : '0'
},
metadata_config = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_metadata_config.com_coreos_monitoring_v1_Prometheus_spec_metadataConfig(
send = True,
send_interval = '0', ),
name = '0',
proxy_url = '0',
queue_config = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_queue_config.com_coreos_monitoring_v1_Prometheus_spec_queueConfig(
batch_send_deadline = '0',
capacity = 56,
max_backoff = '0',
max_retries = 56,
max_samples_per_send = 56,
max_shards = 56,
min_backoff = '0',
min_shards = 56, ),
remote_timeout = '0',
send_exemplars = True,
url = '0',
write_relabel_configs = [
kubernetes.client.models.com_coreos_monitoring_v1_pod_monitor_spec_metric_relabelings.com_coreos_monitoring_v1_PodMonitor_spec_metricRelabelings(
action = '0',
modulus = 56,
regex = '0',
replacement = '0',
separator = '0',
source_labels = [
'0'
],
target_label = '0', )
], )
],
replica_external_label_name = '0',
replicas = 56,
resources = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_resources_1.com_coreos_monitoring_v1_Alertmanager_spec_resources_1(),
retention = '0',
retention_size = '0',
route_prefix = '0',
rule_namespace_selector = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_rule_namespace_selector.com_coreos_monitoring_v1_Prometheus_spec_ruleNamespaceSelector(),
rule_selector = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_rule_selector.com_coreos_monitoring_v1_Prometheus_spec_ruleSelector(),
rules = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_rules.com_coreos_monitoring_v1_Prometheus_spec_rules(
alert = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_rules_alert.com_coreos_monitoring_v1_Prometheus_spec_rules_alert(
for_grace_period = '0',
for_outage_tolerance = '0',
resend_delay = '0', ), ),
scrape_interval = '0',
scrape_timeout = '0',
secrets = [
'0'
],
security_context = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_security_context_1.com_coreos_monitoring_v1_Alertmanager_spec_securityContext_1(
fs_group = 56,
fs_group_change_policy = '0',
run_as_group = 56,
run_as_non_root = True,
run_as_user = 56,
supplemental_groups = [
56
],
sysctls = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_security_context_1_sysctls.com_coreos_monitoring_v1_Alertmanager_spec_securityContext_1_sysctls(
name = '0',
value = '0', )
], ),
service_account_name = '0',
service_monitor_namespace_selector = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_service_monitor_namespace_selector.com_coreos_monitoring_v1_Prometheus_spec_serviceMonitorNamespaceSelector(),
service_monitor_selector = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_service_monitor_selector.com_coreos_monitoring_v1_Prometheus_spec_serviceMonitorSelector(),
sha = '0',
shards = 56,
storage = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_storage.com_coreos_monitoring_v1_Prometheus_spec_storage(
disable_mount_sub_path = True,
empty_dir = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_storage_empty_dir.com_coreos_monitoring_v1_Alertmanager_spec_storage_emptyDir(
medium = '0',
size_limit = kubernetes.client.models.size_limit.sizeLimit(), ),
volume_claim_template = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_storage_volume_claim_template.com_coreos_monitoring_v1_Alertmanager_spec_storage_volumeClaimTemplate(
api_version = '0',
kind = '0',
status = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_storage_volume_claim_template_status.com_coreos_monitoring_v1_Alertmanager_spec_storage_volumeClaimTemplate_status(
access_modes = [
'0'
],
capacity = {
'key' : None
},
conditions = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_storage_volume_claim_template_status_conditions.com_coreos_monitoring_v1_Alertmanager_spec_storage_volumeClaimTemplate_status_conditions(
last_probe_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
last_transition_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
message = '0',
reason = '0',
status = '0',
type = '0', )
],
phase = '0', ), ), ),
tag = '0',
thanos = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_thanos.com_coreos_monitoring_v1_Prometheus_spec_thanos(
base_image = '0',
grpc_server_tls_config = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_thanos_grpc_server_tls_config.com_coreos_monitoring_v1_Prometheus_spec_thanos_grpcServerTlsConfig(
ca_file = '0',
cert_file = '0',
insecure_skip_verify = True,
key_file = '0',
server_name = '0', ),
image = '0',
listen_local = True,
log_format = '0',
log_level = '0',
min_time = '0',
object_storage_config = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_thanos_object_storage_config.com_coreos_monitoring_v1_Prometheus_spec_thanos_objectStorageConfig(
key = '0',
name = '0',
optional = True, ),
object_storage_config_file = '0',
ready_timeout = '0',
sha = '0',
tag = '0',
tracing_config = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_thanos_tracing_config.com_coreos_monitoring_v1_Prometheus_spec_thanos_tracingConfig(
key = '0',
name = '0',
optional = True, ),
tracing_config_file = '0',
version = '0', ),
tolerations = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_tolerations.com_coreos_monitoring_v1_Alertmanager_spec_tolerations(
effect = '0',
key = '0',
operator = '0',
toleration_seconds = 56,
value = '0', )
],
topology_spread_constraints = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_topology_spread_constraints.com_coreos_monitoring_v1_Alertmanager_spec_topologySpreadConstraints(
label_selector = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_label_selector.com_coreos_monitoring_v1_Alertmanager_spec_labelSelector(),
max_skew = 56,
topology_key = '0',
when_unsatisfiable = '0', )
],
version = '0',
volume_mounts = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_volume_mounts.com_coreos_monitoring_v1_Alertmanager_spec_volumeMounts(
mount_path = '0',
mount_propagation = '0',
name = '0',
read_only = True,
sub_path = '0',
sub_path_expr = '0', )
],
volumes = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_volumes.com_coreos_monitoring_v1_Alertmanager_spec_volumes(
aws_elastic_block_store = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_aws_elastic_block_store.com_coreos_monitoring_v1_Alertmanager_spec_awsElasticBlockStore(
fs_type = '0',
partition = 56,
read_only = True,
volume_id = '0', ),
azure_disk = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_azure_disk.com_coreos_monitoring_v1_Alertmanager_spec_azureDisk(
caching_mode = '0',
disk_name = '0',
disk_uri = '0',
fs_type = '0',
kind = '0',
read_only = True, ),
azure_file = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_azure_file.com_coreos_monitoring_v1_Alertmanager_spec_azureFile(
read_only = True,
secret_name = '0',
share_name = '0', ),
cephfs = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_cephfs.com_coreos_monitoring_v1_Alertmanager_spec_cephfs(
monitors = [
'0'
],
path = '0',
read_only = True,
secret_file = '0',
user = '0', ),
cinder = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_cinder.com_coreos_monitoring_v1_Alertmanager_spec_cinder(
fs_type = '0',
read_only = True,
volume_id = '0', ),
csi = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_csi.com_coreos_monitoring_v1_Alertmanager_spec_csi(
driver = '0',
fs_type = '0',
node_publish_secret_ref = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_csi_node_publish_secret_ref.com_coreos_monitoring_v1_Alertmanager_spec_csi_nodePublishSecretRef(
name = '0', ),
read_only = True,
volume_attributes = {
'key' : '0'
}, ),
downward_api = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_downward_api.com_coreos_monitoring_v1_Alertmanager_spec_downwardAPI(
default_mode = 56,
items = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_downward_api_items.com_coreos_monitoring_v1_Alertmanager_spec_downwardAPI_items(
mode = 56,
path = '0', )
], ),
fc = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_fc.com_coreos_monitoring_v1_Alertmanager_spec_fc(
fs_type = '0',
lun = 56,
read_only = True,
target_ww_ns = [
'0'
],
wwids = [
'0'
], ),
flex_volume = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_flex_volume.com_coreos_monitoring_v1_Alertmanager_spec_flexVolume(
driver = '0',
fs_type = '0',
options = {
'key' : '0'
},
read_only = True, ),
flocker = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_flocker.com_coreos_monitoring_v1_Alertmanager_spec_flocker(
dataset_name = '0',
dataset_uuid = '0', ),
gce_persistent_disk = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_gce_persistent_disk.com_coreos_monitoring_v1_Alertmanager_spec_gcePersistentDisk(
fs_type = '0',
partition = 56,
pd_name = '0',
read_only = True, ),
git_repo = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_git_repo.com_coreos_monitoring_v1_Alertmanager_spec_gitRepo(
directory = '0',
repository = '0',
revision = '0', ),
glusterfs = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_glusterfs.com_coreos_monitoring_v1_Alertmanager_spec_glusterfs(
endpoints = '0',
path = '0',
read_only = True, ),
host_path = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_host_path.com_coreos_monitoring_v1_Alertmanager_spec_hostPath(
path = '0',
type = '0', ),
iscsi = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_iscsi.com_coreos_monitoring_v1_Alertmanager_spec_iscsi(
chap_auth_discovery = True,
chap_auth_session = True,
fs_type = '0',
initiator_name = '0',
iqn = '0',
iscsi_interface = '0',
lun = 56,
portals = [
'0'
],
read_only = True,
target_portal = '0', ),
name = '0',
nfs = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_nfs.com_coreos_monitoring_v1_Alertmanager_spec_nfs(
path = '0',
read_only = True,
server = '0', ),
persistent_volume_claim = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_persistent_volume_claim.com_coreos_monitoring_v1_Alertmanager_spec_persistentVolumeClaim(
claim_name = '0',
read_only = True, ),
photon_persistent_disk = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_photon_persistent_disk.com_coreos_monitoring_v1_Alertmanager_spec_photonPersistentDisk(
fs_type = '0',
pd_id = '0', ),
portworx_volume = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_portworx_volume.com_coreos_monitoring_v1_Alertmanager_spec_portworxVolume(
fs_type = '0',
read_only = True,
volume_id = '0', ),
projected = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_projected.com_coreos_monitoring_v1_Alertmanager_spec_projected(
default_mode = 56,
sources = [
kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_projected_sources.com_coreos_monitoring_v1_Alertmanager_spec_projected_sources(
service_account_token = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_projected_service_account_token.com_coreos_monitoring_v1_Alertmanager_spec_projected_serviceAccountToken(
audience = '0',
expiration_seconds = 56,
path = '0', ), )
], ),
quobyte = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_quobyte.com_coreos_monitoring_v1_Alertmanager_spec_quobyte(
group = '0',
read_only = True,
registry = '0',
tenant = '0',
user = '0',
volume = '0', ),
rbd = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_rbd.com_coreos_monitoring_v1_Alertmanager_spec_rbd(
fs_type = '0',
image = '0',
keyring = '0',
monitors = [
'0'
],
pool = '0',
read_only = True,
user = '0', ),
scale_io = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_scale_io.com_coreos_monitoring_v1_Alertmanager_spec_scaleIO(
fs_type = '0',
gateway = '0',
protection_domain = '0',
read_only = True,
secret_ref = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_scale_io_secret_ref.com_coreos_monitoring_v1_Alertmanager_spec_scaleIO_secretRef(
name = '0', ),
ssl_enabled = True,
storage_mode = '0',
storage_pool = '0',
system = '0',
volume_name = '0', ),
storageos = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_storageos.com_coreos_monitoring_v1_Alertmanager_spec_storageos(
fs_type = '0',
read_only = True,
volume_name = '0',
volume_namespace = '0', ),
vsphere_volume = kubernetes.client.models.com_coreos_monitoring_v1_alertmanager_spec_vsphere_volume.com_coreos_monitoring_v1_Alertmanager_spec_vsphereVolume(
fs_type = '0',
storage_policy_id = '0',
storage_policy_name = '0',
volume_path = '0', ), )
],
wal_compression = True,
web = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_spec_web.com_coreos_monitoring_v1_Prometheus_spec_web(
page_title = '0', ), ),
status = kubernetes.client.models.com_coreos_monitoring_v1_prometheus_status.com_coreos_monitoring_v1_Prometheus_status(
available_replicas = 56,
paused = True,
replicas = 56,
unavailable_replicas = 56,
updated_replicas = 56, ), )
],
)
def testComCoreosMonitoringV1PrometheusList(self):
    """Test ComCoreosMonitoringV1PrometheusList"""
    # Constructing the model twice — once with only required fields, once
    # with every optional field populated — is the entire test: any
    # exception raised during construction fails it.
    inst_req_only = self.make_instance(include_optional=False)
    inst_req_and_optional = self.make_instance(include_optional=True)
# Run this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| 81.374566
| 345
| 0.457027
| 8,455
| 117,098
| 5.773743
| 0.064459
| 0.096237
| 0.203167
| 0.224553
| 0.978716
| 0.977446
| 0.976934
| 0.976176
| 0.976094
| 0.973759
| 0
| 0.026765
| 0.492681
| 117,098
| 1,438
| 346
| 81.431154
| 0.794987
| 0.001272
| 0
| 0.969504
| 1
| 0
| 0.009512
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.002837
| 0.004255
| null | null | 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
4d86d35d2da0a26cbf2167b24a6d4dc2fedc68b6
| 754
|
py
|
Python
|
experiments/train/setups/argparse.py
|
didriknielsen/pixelcnn_flow
|
9030f6a66d5ff83d7d299541ed55b20b20bb9a15
|
[
"MIT"
] | 25
|
2020-02-12T00:35:48.000Z
|
2021-09-18T14:30:43.000Z
|
experiments/train/setups/argparse.py
|
didriknielsen/pixelcnn_flow
|
9030f6a66d5ff83d7d299541ed55b20b20bb9a15
|
[
"MIT"
] | 1
|
2021-08-05T10:00:04.000Z
|
2021-08-10T11:11:16.000Z
|
experiments/train/setups/argparse.py
|
didriknielsen/pixelcnn_flow
|
9030f6a66d5ff83d7d299541ed55b20b20bb9a15
|
[
"MIT"
] | null | null | null |
import ast

import torch
def prep_int(arg, num, allow_none=False):
    """Broadcast an int (or, if allowed, None) to a list of length *num*.

    A scalar ``int`` (and ``None`` when ``allow_none`` is set) is repeated
    ``num`` times; a sequence is passed through.  Raises ``AssertionError``
    if the result is not exactly ``num`` long.
    """
    if allow_none and arg is None:
        arg = [None] * num
    if isinstance(arg, int):
        arg = [arg] * num
    assert len(arg) == num
    return arg
def prep_float(arg, num, allow_none=False):
    """Broadcast a float (or, if allowed, None) to a list of length *num*.

    A scalar ``float`` (and ``None`` when ``allow_none`` is set) is repeated
    ``num`` times; a sequence is passed through.  Raises ``AssertionError``
    if the result is not exactly ``num`` long.
    """
    if allow_none and arg is None:
        arg = [None] * num
    if isinstance(arg, float):
        arg = [arg] * num
    assert len(arg) == num
    return arg
def prep_str(arg, num):
    """Parse *arg* as a Python list literal, else broadcast it to length *num*.

    ``arg`` is typically a command-line string.  If it parses to a list
    literal (e.g. ``"['a', 'b']"``) that list is used; otherwise the
    (possibly parsed) value is repeated ``num`` times.  Raises
    ``AssertionError`` if the result is not exactly ``num`` long.
    """
    try:
        # literal_eval only accepts Python literals, unlike the previous
        # eval(), which would execute arbitrary code from a CLI string.
        # The narrow exception tuple replaces a bare `except:` that also
        # swallowed KeyboardInterrupt/SystemExit.
        arg = ast.literal_eval(arg)
        assert isinstance(arg, list)
    except (ValueError, SyntaxError, TypeError, AssertionError):
        # Not a list literal: broadcast the value num times.
        arg = [arg] * num
    assert len(arg) == num
    return arg
def prep_bool(arg, num):
    """Parse *arg* as a Python list literal, else broadcast it to length *num*.

    ``arg`` is typically a command-line string such as ``"True"`` or
    ``"[True, False]"``.  A parsed list is used as-is; any other value
    (parsed or not) is repeated ``num`` times.  Raises ``AssertionError``
    if the result is not exactly ``num`` long.
    """
    try:
        # literal_eval only accepts Python literals, unlike the previous
        # eval(), which would execute arbitrary code from a CLI string.
        # The narrow exception tuple replaces a bare `except:` that also
        # swallowed KeyboardInterrupt/SystemExit.
        arg = ast.literal_eval(arg)
        assert isinstance(arg, list)
    except (ValueError, SyntaxError, TypeError, AssertionError):
        # Not a list literal: broadcast the value num times.
        arg = [arg] * num
    assert len(arg) == num
    return arg
| 22.176471
| 48
| 0.570292
| 110
| 754
| 3.836364
| 0.209091
| 0.199052
| 0.127962
| 0.14218
| 0.902844
| 0.902844
| 0.902844
| 0.902844
| 0.902844
| 0.902844
| 0
| 0
| 0.312997
| 754
| 33
| 49
| 22.848485
| 0.814672
| 0
| 0
| 0.758621
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.206897
| 1
| 0.137931
| false
| 0
| 0.034483
| 0
| 0.310345
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4d967af44d9fdb59cad0584933b6d4afd5caf7ed
| 74,780
|
py
|
Python
|
fpl/tests.py
|
sornars/leaguetracker
|
b83cd21db2edfeeeaecc27b768107e5b7bf0cd93
|
[
"MIT"
] | null | null | null |
fpl/tests.py
|
sornars/leaguetracker
|
b83cd21db2edfeeeaecc27b768107e5b7bf0cd93
|
[
"MIT"
] | null | null | null |
fpl/tests.py
|
sornars/leaguetracker
|
b83cd21db2edfeeeaecc27b768107e5b7bf0cd93
|
[
"MIT"
] | null | null | null |
import datetime
import decimal
from django.contrib.auth import get_user_model
from django.test import TestCase
from django.urls import reverse
from django.utils import timezone
from unittest.mock import Mock, patch
from fpl.models import (ClassicLeague, HeadToHeadLeague, HeadToHeadMatch, HeadToHeadPerformance, Gameweek,
Manager, ManagerPerformance, ClassicPayout, HeadToHeadPayout)
from leagues.models import League, LeagueEntrant, Season
class ClassicLeagueTestCase(TestCase):
def setUp(self):
    """Create a season, a league with three paid-up entrants, a
    ClassicLeague for FPL league id 1, and one Manager per entrant."""
    User = get_user_model()
    self.entrant_1 = User.objects.create(username='entrant_1')
    self.entrant_2 = User.objects.create(username='entrant_2')
    self.entrant_3 = User.objects.create(username='entrant_3')
    # NOTE(review): end_date '2018-05-15' precedes start_date '2018-08-01';
    # looks like a typo for 2019 — confirm against Season validation.
    season = Season.objects.create(start_date='2018-08-01', end_date='2018-05-15')
    league = League.objects.create(name='Test League', entry_fee=10, season=season)
    LeagueEntrant.objects.bulk_create([
        LeagueEntrant(entrant=self.entrant_1, league=league, paid_entry=True),
        LeagueEntrant(entrant=self.entrant_2, league=league, paid_entry=True),
        LeagueEntrant(entrant=self.entrant_3, league=league, paid_entry=True)
    ])
    ClassicLeague.objects.create(league=league, fpl_league_id=1)
    Manager.objects.bulk_create([
        Manager(entrant=self.entrant_1, team_name='Team 1', fpl_manager_id=1, season=season),
        Manager(entrant=self.entrant_2, team_name='Team 2', fpl_manager_id=2, season=season),
        Manager(entrant=self.entrant_3, team_name='Team 3', fpl_manager_id=3, season=season)
    ])
@patch('fpl.models.Manager.retrieve_performance_data')
@patch('fpl.models.datetime')
@patch('fpl.models.requests.get')
def test_retrieve_league_data(self, mock_requests_get, mock_datetime, _):
    """retrieve_league_data syncs managers and the league name from the
    mocked FPL API and records an update timestamp."""
    # Canned API payload: a renamed league with four standings entries —
    # one more manager than setUp created, and entry 1 renamed.
    league_data = {
        'league': {
            'name': 'Test League 1'
        },
        'standings': {
            'results': [
                {
                    'entry': 1,
                    'entry_name': 'Test Manager Team'
                },
                {
                    'entry': 2,
                    'entry_name': 'Team 2'
                },
                {
                    'entry': 3,
                    'entry_name': 'Team 3'
                },
                {
                    'entry': 4,
                    'entry_name': 'Team 4'
                },
            ]
        }
    }
    mock_response = Mock()
    mock_response.json.return_value = league_data
    mock_requests_get.return_value = mock_response
    # Freeze "today" inside fpl.models; timedelta is passed through so
    # date arithmetic in the model still works against the frozen date.
    mock_datetime.date.today.return_value = datetime.date(2018, 5, 10)
    mock_datetime.timedelta.side_effect = lambda *args, **kw: datetime.timedelta(*args, **kw)
    classic_league = ClassicLeague.objects.get()
    classic_league.retrieve_league_data()
    # The fourth entry was created, entry 1 was renamed, the league name
    # was refreshed, and last_updated was stamped.
    self.assertEqual(Manager.objects.count(), 4)
    self.assertEqual(Manager.objects.get(fpl_manager_id=1).team_name, 'Test Manager Team')
    self.assertEqual(League.objects.get().name, 'Test League 1')
    self.assertIsNotNone(classic_league.last_updated)
@patch('fpl.models.Manager.retrieve_performance_data')
@patch('fpl.models.requests.get')
def test_retrieve_league_data_after_season_end_does_not_update(self, mock_requests_get, _):
    """Once the season has been over long enough (14 days here), the
    league is not refreshed and the FPL API is never hit."""
    league_data = {
        'league': {
            'name': 'Test League 1'
        },
        'standings': {
            'results': [
                {
                    'entry': 1,
                    'entry_name': 'Test Manager Team'
                },
                {
                    'entry': 2,
                    'entry_name': 'Team 2'
                },
                {
                    'entry': 3,
                    'entry_name': 'Team 3'
                },
                {
                    'entry': 4,
                    'entry_name': 'Team 4'
                },
            ]
        }
    }
    mock_response = Mock()
    mock_response.json.return_value = league_data
    mock_requests_get.return_value = mock_response
    classic_league = ClassicLeague.objects.get()
    # Move the league onto a season that ended 14 days ago.
    today = datetime.date.today()
    season = Season.objects.create(start_date='2018-08-01', end_date=today - datetime.timedelta(days=14))
    classic_league.league.season = season
    classic_league.league.save()
    classic_league.retrieve_league_data()
    # Everything is unchanged from setUp and no HTTP request was made.
    self.assertEqual(Manager.objects.count(), 3)
    self.assertEqual(Manager.objects.get(fpl_manager_id=1).team_name, 'Team 1')
    self.assertEqual(League.objects.get().name, 'Test League')
    self.assertIsNone(classic_league.last_updated)
    mock_requests_get.assert_not_called()
@patch('fpl.models.Gameweek.retrieve_gameweek_data')
@patch('fpl.models.ClassicLeague.retrieve_league_data')
def test_process_payouts(self, mock_retrieve_league_data, mock_retrieve_gameweek_data):
    """process_payouts refreshes data once, then merges an unresolved
    (tied) payout into the next one and assigns winners."""
    classic_league = ClassicLeague.objects.get()
    gameweek_1 = Gameweek.objects.create(number=1, start_date='2017-08-01', end_date='2017-08-02',
                                         season=classic_league.league.season)
    gameweek_2 = Gameweek.objects.create(number=2, start_date='2017-08-08', end_date='2017-08-09',
                                         season=classic_league.league.season)
    gameweek_3 = Gameweek.objects.create(number=3, start_date='2017-08-15', end_date='2017-08-16',
                                         season=classic_league.league.season)
    # One identical payout per gameweek, none paid out yet.
    payout_1 = ClassicPayout.objects.create(
        league=classic_league.league,
        name='Test Payout',
        amount=10,
        position=1,
        start_date=gameweek_1.start_date,
        end_date=gameweek_1.end_date,
        paid_out=False
    )
    payout_2 = ClassicPayout.objects.create(
        league=classic_league.league,
        name='Test Payout',
        amount=10,
        position=1,
        start_date=gameweek_2.start_date,
        end_date=gameweek_2.end_date,
        paid_out=False
    )
    payout_3 = ClassicPayout.objects.create(
        league=classic_league.league,
        name='Test Payout',
        amount=10,
        position=1,
        start_date=gameweek_3.start_date,
        end_date=gameweek_3.end_date,
        paid_out=False
    )
    # Gameweek 1 is a three-way tie at 0; manager_3 leads gameweeks 2 and 3.
    manager_1, manager_2, manager_3 = Manager.objects.all()
    ManagerPerformance.objects.create(manager=manager_1, gameweek=gameweek_1, score=0)
    ManagerPerformance.objects.create(manager=manager_1, gameweek=gameweek_2, score=0)
    ManagerPerformance.objects.create(manager=manager_1, gameweek=gameweek_3, score=10)
    ManagerPerformance.objects.create(manager=manager_2, gameweek=gameweek_1, score=0)
    ManagerPerformance.objects.create(manager=manager_2, gameweek=gameweek_2, score=10)
    ManagerPerformance.objects.create(manager=manager_2, gameweek=gameweek_3, score=10)
    ManagerPerformance.objects.create(manager=manager_3, gameweek=gameweek_1, score=0)
    ManagerPerformance.objects.create(manager=manager_3, gameweek=gameweek_2, score=20)
    ManagerPerformance.objects.create(manager=manager_3, gameweek=gameweek_3, score=15)
    classic_league.process_payouts()
    # The tied gameweek-1 payout rolled into gameweek 2's: three payouts
    # became two, the first now worth 20 and spanning both gameweeks.
    self.assertEqual(ClassicPayout.objects.count(), 2)
    payout_1_processed, payout_2_processed = ClassicPayout.objects.all()
    mock_retrieve_gameweek_data.assert_called_once()
    mock_retrieve_league_data.assert_called_once()
    self.assertEqual(payout_1_processed.amount, 20)
    # Payout dates were created from the gameweeks' string dates, so
    # compare against parsed dates.
    self.assertEqual(payout_1_processed.start_date,
                     datetime.datetime.strptime(payout_1.start_date, '%Y-%m-%d').date())
    self.assertEqual(payout_1_processed.end_date, datetime.datetime.strptime(payout_2.end_date, '%Y-%m-%d').date())
    self.assertEqual(payout_1_processed.winner, manager_3.entrant)
    self.assertEqual(payout_2_processed.amount, 10)
    self.assertEqual(payout_2_processed.start_date,
                     datetime.datetime.strptime(payout_3.start_date, '%Y-%m-%d').date())
    self.assertEqual(payout_2_processed.end_date, datetime.datetime.strptime(payout_3.end_date, '%Y-%m-%d').date())
    self.assertEqual(payout_2_processed.winner, manager_3.entrant)
def test_managers(self):
    """ClassicLeague.managers lists the league's managers sorted by
    current_score (descending) and is scoped to that league's season."""
    classic_league = ClassicLeague.objects.get()
    gameweek_1 = Gameweek.objects.create(number=1, start_date='2017-08-01', end_date='2017-08-02',
                                         season=classic_league.league.season)
    gameweek_2 = Gameweek.objects.create(number=2, start_date='2017-08-08', end_date='2017-08-09',
                                         season=classic_league.league.season)
    gameweek_3 = Gameweek.objects.create(number=3, start_date='2017-08-15', end_date='2017-08-16',
                                         season=classic_league.league.season)
    # Totals: manager_1 = 10, manager_2 = 20, manager_3 = 35.
    manager_1, manager_2, manager_3 = Manager.objects.all()
    ManagerPerformance.objects.create(manager=manager_1, gameweek=gameweek_1, score=0)
    ManagerPerformance.objects.create(manager=manager_1, gameweek=gameweek_2, score=0)
    ManagerPerformance.objects.create(manager=manager_1, gameweek=gameweek_3, score=10)
    ManagerPerformance.objects.create(manager=manager_2, gameweek=gameweek_1, score=0)
    ManagerPerformance.objects.create(manager=manager_2, gameweek=gameweek_2, score=10)
    ManagerPerformance.objects.create(manager=manager_2, gameweek=gameweek_3, score=10)
    ManagerPerformance.objects.create(manager=manager_3, gameweek=gameweek_1, score=0)
    ManagerPerformance.objects.create(manager=manager_3, gameweek=gameweek_2, score=20)
    ManagerPerformance.objects.create(manager=manager_3, gameweek=gameweek_3, score=15)
    self.assertEqual(len(classic_league.managers), 3)
    self.assertEqual(classic_league.managers[0].current_score, 35)
    self.assertEqual(classic_league.managers[1].current_score, 20)
    self.assertEqual(classic_league.managers[2].current_score, 10)
    self.assertTrue(classic_league.managers[0].paid_entry)
    self.assertTrue(classic_league.managers[1].paid_entry)
    self.assertTrue(classic_league.managers[2].paid_entry)
    # Second season/league with the same entrants (unpaid this time) and
    # higher scores, to prove the two leagues' managers don't bleed into
    # each other.
    season_2 = Season.objects.create(start_date='2019-08-01', end_date='2019-05-15')
    Manager.objects.bulk_create([
        Manager(entrant=self.entrant_1, team_name='Team 1', fpl_manager_id=1, season=season_2),
        Manager(entrant=self.entrant_2, team_name='Team 2', fpl_manager_id=2, season=season_2),
        Manager(entrant=self.entrant_3, team_name='Team 3', fpl_manager_id=3, season=season_2)
    ])
    league_2 = League.objects.create(name='Test League 2', entry_fee=10, season=season_2)
    LeagueEntrant.objects.bulk_create([
        LeagueEntrant(entrant=self.entrant_1, league=league_2, paid_entry=False),
        LeagueEntrant(entrant=self.entrant_2, league=league_2, paid_entry=False),
        LeagueEntrant(entrant=self.entrant_3, league=league_2, paid_entry=False)
    ])
    classic_league_2 = ClassicLeague.objects.create(league=league_2, fpl_league_id=1)
    gameweek_4 = Gameweek.objects.create(number=1, start_date='2017-08-01', end_date='2017-08-02',
                                         season=classic_league_2.league.season)
    gameweek_5 = Gameweek.objects.create(number=2, start_date='2017-08-08', end_date='2017-08-09',
                                         season=classic_league_2.league.season)
    gameweek_6 = Gameweek.objects.create(number=3, start_date='2017-08-15', end_date='2017-08-16',
                                         season=classic_league_2.league.season)
    # Totals in league 2: manager_1 = 25, manager_2 = 35, manager_3 = 50.
    manager_1, manager_2, manager_3 = Manager.objects.filter(season=season_2)
    ManagerPerformance.objects.create(manager=manager_1, gameweek=gameweek_4, score=5)
    ManagerPerformance.objects.create(manager=manager_1, gameweek=gameweek_5, score=5)
    ManagerPerformance.objects.create(manager=manager_1, gameweek=gameweek_6, score=15)
    ManagerPerformance.objects.create(manager=manager_2, gameweek=gameweek_4, score=5)
    ManagerPerformance.objects.create(manager=manager_2, gameweek=gameweek_5, score=15)
    ManagerPerformance.objects.create(manager=manager_2, gameweek=gameweek_6, score=15)
    ManagerPerformance.objects.create(manager=manager_3, gameweek=gameweek_4, score=5)
    ManagerPerformance.objects.create(manager=manager_3, gameweek=gameweek_5, score=25)
    ManagerPerformance.objects.create(manager=manager_3, gameweek=gameweek_6, score=20)
    # League 1 is unchanged by league 2's data.
    self.assertEqual(len(classic_league.managers), 3)
    self.assertEqual(classic_league.managers[0].current_score, 35)
    self.assertEqual(classic_league.managers[1].current_score, 20)
    self.assertEqual(classic_league.managers[2].current_score, 10)
    self.assertTrue(classic_league.managers[0].paid_entry)
    self.assertTrue(classic_league.managers[1].paid_entry)
    self.assertTrue(classic_league.managers[2].paid_entry)
    self.assertEqual(len(classic_league_2.managers), 3)
    self.assertEqual(classic_league_2.managers[0].current_score, 50)
    self.assertEqual(classic_league_2.managers[1].current_score, 35)
    self.assertEqual(classic_league_2.managers[2].current_score, 25)
    self.assertFalse(classic_league_2.managers[0].paid_entry)
    self.assertFalse(classic_league_2.managers[1].paid_entry)
    self.assertFalse(classic_league_2.managers[2].paid_entry)
class HeadToHeadLeagueTestCase(TestCase):
def setUp(self):
    """Create a season, a league with three paid-up entrants, a
    HeadToHeadLeague for FPL league id 1, one Manager per entrant, and
    three gameweeks."""
    User = get_user_model()
    self.entrant_1 = User.objects.create(username='entrant_1')
    self.entrant_2 = User.objects.create(username='entrant_2')
    self.entrant_3 = User.objects.create(username='entrant_3')
    self.season = Season.objects.create(start_date='2017-08-01', end_date='2018-05-13')
    league = League.objects.create(name='Test League', entry_fee=10, season=self.season)
    LeagueEntrant.objects.bulk_create([
        LeagueEntrant(entrant=self.entrant_1, league=league, paid_entry=True),
        LeagueEntrant(entrant=self.entrant_2, league=league, paid_entry=True),
        LeagueEntrant(entrant=self.entrant_3, league=league, paid_entry=True)
    ])
    HeadToHeadLeague.objects.create(league=league, fpl_league_id=1)
    Manager.objects.bulk_create([
        Manager(entrant=self.entrant_1, team_name='Team 1', fpl_manager_id=1, season=self.season),
        Manager(entrant=self.entrant_2, team_name='Team 2', fpl_manager_id=2, season=self.season),
        Manager(entrant=self.entrant_3, team_name='Team 3', fpl_manager_id=3, season=self.season)
    ])
    Gameweek.objects.bulk_create([
        Gameweek(number=1, start_date='2017-08-01', end_date='2017-08-02', season=self.season),
        Gameweek(number=2, start_date='2017-08-08', end_date='2017-08-09', season=self.season),
        Gameweek(number=3, start_date='2017-08-15', end_date='2017-08-16', season=self.season)
    ])
@patch('fpl.models.HeadToHeadMatch.calculate_score')
@patch('fpl.models.Manager.retrieve_performance_data')
@patch('fpl.models.datetime')
@patch('fpl.models.FPLLeague.get_authorized_session')
def test_retrieve_league_data(self, mock_get_authorized_session, mock_datetime, *_):
    """retrieve_league_data (H2H) syncs managers, the league name, and
    the fixture list via the authorized session."""
    # H2H payload shape: 'league-entries' instead of 'standings', plus a
    # paginated 'matches' section (has_next False → single page).
    league_data = {
        'league': {
            'name': 'Test League 1'
        },
        'league-entries': [
            {
                'entry': 1,
                'entry_name': 'Test Manager Team'
            },
            {
                'entry': 2,
                'entry_name': 'Team 2'
            },
            {
                'entry': 3,
                'entry_name': 'Team 3'
            },
            {
                'entry': 4,
                'entry_name': 'Team 4'
            },
        ],
        'matches': {
            'has_next': False,
            'results': [
                {
                    'id': 3,
                    'event': 3,
                    'entry_1_entry': 1,
                    'entry_1_points': 10,
                    'entry_2_entry': 4,
                    'entry_2_points': 20
                },
                {
                    'id': 4,
                    'event': 3,
                    'entry_1_entry': 2,
                    'entry_1_points': 10,
                    'entry_2_entry': 3,
                    'entry_2_points': 20
                }
            ]
        }
    }
    # The H2H endpoint requires login, so the mock goes through
    # get_authorized_session rather than requests.get.
    mock_response = Mock()
    mock_response.json.return_value = league_data
    mock_session = Mock()
    mock_session.get.return_value = mock_response
    mock_get_authorized_session.return_value = mock_session
    # Freeze "today" before the season end so the update proceeds.
    mock_datetime.date.today.return_value = datetime.date(2018, 5, 10)
    mock_datetime.timedelta.side_effect = lambda *args, **kw: datetime.timedelta(*args, **kw)
    h2h_league = HeadToHeadLeague.objects.get()
    h2h_league.retrieve_league_data()
    self.assertEqual(Manager.objects.count(), 4)
    self.assertEqual(Manager.objects.get(fpl_manager_id=1).team_name, 'Test Manager Team')
    self.assertEqual(League.objects.get().name, 'Test League 1')
    self.assertEqual(HeadToHeadMatch.objects.count(), 2)
    self.assertIsNotNone(h2h_league.last_updated)
@patch('fpl.models.HeadToHeadMatch.calculate_score')
@patch('fpl.models.Manager.retrieve_performance_data')
@patch('fpl.models.datetime')
@patch('fpl.models.FPLLeague.get_authorized_session')
def test_retrieve_league_data_odd_number_of_entrants(self, mock_get_authorized_session, mock_datetime, *_):
    """With an odd entrant count FPL pairs someone against 'AVERAGE'; a
    synthetic Manager is created to represent that opponent."""
    league_data = {
        'league': {
            'name': 'Test League 1'
        },
        'league-entries': [
            {
                'entry': 1,
                'entry_name': 'Test Manager Team'
            },
            {
                'entry': 2,
                'entry_name': 'Team 2'
            },
            {
                'entry': 3,
                'entry_name': 'Team 3'
            }
        ],
        'matches': {
            'has_next': False,
            'results': [
                {
                    'id': 3,
                    'event': 3,
                    'entry_1_entry': 1,
                    'entry_1_points': 10,
                    # The odd man out plays the league average.
                    'entry_2_entry': 'AVERAGE',
                    'entry_2_points': 20
                },
                {
                    'id': 4,
                    'event': 3,
                    'entry_1_entry': 2,
                    'entry_1_points': 10,
                    'entry_2_entry': 3,
                    'entry_2_points': 20
                }
            ]
        }
    }
    mock_response = Mock()
    mock_response.json.return_value = league_data
    mock_session = Mock()
    mock_session.get.return_value = mock_response
    mock_get_authorized_session.return_value = mock_session
    mock_datetime.date.today.return_value = datetime.date(2018, 5, 10)
    mock_datetime.timedelta.side_effect = lambda *args, **kw: datetime.timedelta(*args, **kw)
    h2h_league = HeadToHeadLeague.objects.get()
    h2h_league.retrieve_league_data()
    # 3 real managers + 1 synthetic AVERAGE manager.
    self.assertEqual(Manager.objects.count(), 4)
    self.assertEqual(Manager.objects.get(fpl_manager_id=1, season=self.season).team_name, 'Test Manager Team')
    self.assertEqual(League.objects.get().name, 'Test League 1')
    self.assertEqual(HeadToHeadMatch.objects.count(), 2)
    self.assertIsNotNone(h2h_league.last_updated)
    # The AVERAGE manager is keyed by the negated FPL league id so it
    # cannot collide with a real manager id.
    average_manager = Manager.objects.get(season=self.season, fpl_manager_id=h2h_league.fpl_league_id*-1)
    self.assertEqual(average_manager.team_name, 'AVERAGE')
@patch('fpl.models.HeadToHeadMatch.calculate_score')
@patch('fpl.models.Manager.retrieve_performance_data')
@patch('fpl.models.FPLLeague.get_authorized_session')
def test_retrieve_league_data_after_season_end_does_not_update(self, mock_get_authorized_session, *_):
    """Once the season has been over long enough (14 days here), the H2H
    league is not refreshed and no authorized session is opened."""
    league_data = {
        'league': {
            'name': 'Test League 1'
        },
        'league-entries': [
            {
                'entry': 1,
                'entry_name': 'Test Manager Team'
            },
            {
                'entry': 2,
                'entry_name': 'Team 2'
            },
            {
                'entry': 3,
                'entry_name': 'Team 3'
            },
            {
                'entry': 4,
                'entry_name': 'Team 4'
            },
        ],
        'matches': {
            'has_next': False,
            'results': [
                {
                    'id': 3,
                    'event': 3,
                    'entry_1_entry': 1,
                    'entry_1_points': 10,
                    'entry_2_entry': 4,
                    'entry_2_points': 20
                },
                {
                    'id': 4,
                    'event': 3,
                    'entry_1_entry': 2,
                    'entry_1_points': 10,
                    'entry_2_entry': 3,
                    'entry_2_points': 20
                }
            ]
        }
    }
    mock_response = Mock()
    mock_response.json.return_value = league_data
    mock_session = Mock()
    mock_session.get.return_value = mock_response
    mock_get_authorized_session.return_value = mock_session
    h2h_league = HeadToHeadLeague.objects.get()
    # Move the league onto a season that ended 14 days ago.
    today = datetime.date.today()
    season = Season.objects.create(start_date='2018-08-01', end_date=today - datetime.timedelta(days=14))
    h2h_league.league.season = season
    h2h_league.league.save()
    h2h_league.retrieve_league_data()
    # Everything is unchanged from setUp; no matches were created and
    # the session factory was never invoked.
    self.assertEqual(Manager.objects.count(), 3)
    self.assertEqual(Manager.objects.get(fpl_manager_id=1).team_name, 'Team 1')
    self.assertEqual(League.objects.get().name, 'Test League')
    self.assertEqual(HeadToHeadMatch.objects.count(), 0)
    self.assertIsNone(h2h_league.last_updated)
    mock_get_authorized_session.assert_not_called()
@patch('fpl.models.Gameweek.retrieve_gameweek_data')
@patch('fpl.models.HeadToHeadLeague.retrieve_league_data')
def test_process_payouts(self, mock_retrieve_league_data, mock_retrieve_gameweek_data):
    """H2H process_payouts refreshes data once, merges the tied
    gameweek-1 payout into gameweek 2's, and assigns winners."""
    h2h_league = HeadToHeadLeague.objects.get()
    # Gameweeks were bulk-created in setUp.
    gameweek_1, gameweek_2, gameweek_3 = Gameweek.objects.order_by('start_date').all()
    payout_1 = HeadToHeadPayout.objects.create(
        league=h2h_league.league,
        name='Test Payout',
        amount=10,
        position=1,
        start_date=gameweek_1.start_date,
        end_date=gameweek_1.end_date,
        paid_out=False
    )
    payout_2 = HeadToHeadPayout.objects.create(
        league=h2h_league.league,
        name='Test Payout',
        amount=10,
        position=1,
        start_date=gameweek_2.start_date,
        end_date=gameweek_2.end_date,
        paid_out=False
    )
    payout_3 = HeadToHeadPayout.objects.create(
        league=h2h_league.league,
        name='Test Payout',
        amount=10,
        position=1,
        start_date=gameweek_3.start_date,
        end_date=gameweek_3.end_date,
        paid_out=False
    )
    # Gameweek 1 is a three-way tie at 0; manager_3 leads gameweeks 2 and 3.
    manager_1, manager_2, manager_3 = Manager.objects.all()
    HeadToHeadPerformance.objects.create(h2h_league=h2h_league, manager=manager_1, gameweek=gameweek_1, score=0)
    HeadToHeadPerformance.objects.create(h2h_league=h2h_league, manager=manager_1, gameweek=gameweek_2, score=0)
    HeadToHeadPerformance.objects.create(h2h_league=h2h_league, manager=manager_1, gameweek=gameweek_3, score=10)
    HeadToHeadPerformance.objects.create(h2h_league=h2h_league, manager=manager_2, gameweek=gameweek_1, score=0)
    HeadToHeadPerformance.objects.create(h2h_league=h2h_league, manager=manager_2, gameweek=gameweek_2, score=10)
    HeadToHeadPerformance.objects.create(h2h_league=h2h_league, manager=manager_2, gameweek=gameweek_3, score=10)
    HeadToHeadPerformance.objects.create(h2h_league=h2h_league, manager=manager_3, gameweek=gameweek_1, score=0)
    HeadToHeadPerformance.objects.create(h2h_league=h2h_league, manager=manager_3, gameweek=gameweek_2, score=20)
    HeadToHeadPerformance.objects.create(h2h_league=h2h_league, manager=manager_3, gameweek=gameweek_3, score=15)
    h2h_league.process_payouts()
    mock_retrieve_gameweek_data.assert_called_once()
    mock_retrieve_league_data.assert_called_once()
    # Three payouts became two: the first worth 20 and spanning both
    # merged gameweeks.  Dates compare directly here (the gameweeks came
    # from the DB as date objects, unlike the Classic test).
    self.assertEqual(HeadToHeadPayout.objects.count(), 2)
    payout_1_processed, payout_2_processed = HeadToHeadPayout.objects.all()
    self.assertEqual(payout_1_processed.amount, 20)
    self.assertEqual(payout_1_processed.start_date,
                     payout_1.start_date)
    self.assertEqual(payout_1_processed.end_date, payout_2.end_date)
    self.assertEqual(payout_1_processed.winner, manager_3.entrant)
    self.assertEqual(payout_2_processed.amount, 10)
    self.assertEqual(payout_2_processed.start_date,
                     payout_3.start_date)
    self.assertEqual(payout_2_processed.end_date, payout_3.end_date)
    self.assertEqual(payout_2_processed.winner, manager_3.entrant)
def test_managers(self):
    """HeadToHeadLeague.managers lists managers sorted by
    current_h2h_score (descending) and is scoped to its own league."""
    h2h_league = HeadToHeadLeague.objects.get()
    gameweek_1, gameweek_2, gameweek_3 = Gameweek.objects.order_by('start_date').all()
    # Totals: manager_1 = 10, manager_2 = 20, manager_3 = 35.
    manager_1, manager_2, manager_3 = Manager.objects.all()
    HeadToHeadPerformance.objects.create(h2h_league=h2h_league, manager=manager_1, gameweek=gameweek_1, score=0)
    HeadToHeadPerformance.objects.create(h2h_league=h2h_league, manager=manager_1, gameweek=gameweek_2, score=0)
    HeadToHeadPerformance.objects.create(h2h_league=h2h_league, manager=manager_1, gameweek=gameweek_3, score=10)
    HeadToHeadPerformance.objects.create(h2h_league=h2h_league, manager=manager_2, gameweek=gameweek_1, score=0)
    HeadToHeadPerformance.objects.create(h2h_league=h2h_league, manager=manager_2, gameweek=gameweek_2, score=10)
    HeadToHeadPerformance.objects.create(h2h_league=h2h_league, manager=manager_2, gameweek=gameweek_3, score=10)
    HeadToHeadPerformance.objects.create(h2h_league=h2h_league, manager=manager_3, gameweek=gameweek_1, score=0)
    HeadToHeadPerformance.objects.create(h2h_league=h2h_league, manager=manager_3, gameweek=gameweek_2, score=20)
    HeadToHeadPerformance.objects.create(h2h_league=h2h_league, manager=manager_3, gameweek=gameweek_3, score=15)
    self.assertEqual(len(h2h_league.managers), 3)
    self.assertEqual(h2h_league.managers[0].current_h2h_score, 35)
    self.assertEqual(h2h_league.managers[1].current_h2h_score, 20)
    self.assertEqual(h2h_league.managers[2].current_h2h_score, 10)
    self.assertTrue(h2h_league.managers[0].paid_entry)
    self.assertTrue(h2h_league.managers[1].paid_entry)
    self.assertTrue(h2h_league.managers[2].paid_entry)
    # Second season/league with the same entrants (unpaid) and higher
    # scores, to prove the two leagues' managers stay separate.
    season_2 = Season.objects.create(start_date='2018-08-01', end_date='2019-05-13')
    league_2 = League.objects.create(name='Test League 2', entry_fee=10, season=season_2)
    LeagueEntrant.objects.bulk_create([
        LeagueEntrant(entrant=self.entrant_1, league=league_2, paid_entry=False),
        LeagueEntrant(entrant=self.entrant_2, league=league_2, paid_entry=False),
        LeagueEntrant(entrant=self.entrant_3, league=league_2, paid_entry=False)
    ])
    h2h_league_2 = HeadToHeadLeague.objects.create(league=league_2, fpl_league_id=1)
    Manager.objects.bulk_create([
        Manager(entrant=self.entrant_1, team_name='Team 1', fpl_manager_id=1, season=season_2),
        Manager(entrant=self.entrant_2, team_name='Team 2', fpl_manager_id=2, season=season_2),
        Manager(entrant=self.entrant_3, team_name='Team 3', fpl_manager_id=3, season=season_2)
    ])
    gameweek_4 = Gameweek.objects.create(number=1, start_date='2018-08-01', end_date='2018-08-02',
                                         season=h2h_league_2.league.season)
    gameweek_5 = Gameweek.objects.create(number=2, start_date='2018-08-08', end_date='2018-08-09',
                                         season=h2h_league_2.league.season)
    gameweek_6 = Gameweek.objects.create(number=3, start_date='2018-08-15', end_date='2018-08-16',
                                         season=h2h_league_2.league.season)
    # Totals in league 2: manager_1 = 25, manager_2 = 35, manager_3 = 50.
    manager_1, manager_2, manager_3 = Manager.objects.filter(season=season_2)
    HeadToHeadPerformance.objects.create(h2h_league=h2h_league_2, manager=manager_1, gameweek=gameweek_4, score=5)
    HeadToHeadPerformance.objects.create(h2h_league=h2h_league_2, manager=manager_1, gameweek=gameweek_5, score=5)
    HeadToHeadPerformance.objects.create(h2h_league=h2h_league_2, manager=manager_1, gameweek=gameweek_6, score=15)
    HeadToHeadPerformance.objects.create(h2h_league=h2h_league_2, manager=manager_2, gameweek=gameweek_4, score=5)
    HeadToHeadPerformance.objects.create(h2h_league=h2h_league_2, manager=manager_2, gameweek=gameweek_5, score=15)
    HeadToHeadPerformance.objects.create(h2h_league=h2h_league_2, manager=manager_2, gameweek=gameweek_6, score=15)
    HeadToHeadPerformance.objects.create(h2h_league=h2h_league_2, manager=manager_3, gameweek=gameweek_4, score=5)
    HeadToHeadPerformance.objects.create(h2h_league=h2h_league_2, manager=manager_3, gameweek=gameweek_5, score=25)
    HeadToHeadPerformance.objects.create(h2h_league=h2h_league_2, manager=manager_3, gameweek=gameweek_6, score=20)
    self.assertEqual(len(h2h_league_2.managers), 3)
    self.assertEqual(h2h_league_2.managers[0].current_h2h_score, 50)
    self.assertEqual(h2h_league_2.managers[1].current_h2h_score, 35)
    self.assertEqual(h2h_league_2.managers[2].current_h2h_score, 25)
    self.assertFalse(h2h_league_2.managers[0].paid_entry)
    self.assertFalse(h2h_league_2.managers[1].paid_entry)
    self.assertFalse(h2h_league_2.managers[2].paid_entry)
class HeadToHeadMatchTestCase(TestCase):
    """Tests for HeadToHeadMatch.calculate_score."""

    def test_calculate_score(self):
        """The match winner is awarded 3 H2H points, the loser 0."""
        User = get_user_model()
        entrant_1 = User.objects.create(username='entrant_1')
        entrant_2 = User.objects.create(username='entrant_2')
        season = Season.objects.create(start_date='2017-08-01', end_date='2018-05-15')
        league = League.objects.create(name='Test League', entry_fee=10, season=season)
        LeagueEntrant.objects.bulk_create([
            LeagueEntrant(entrant=entrant_1, league=league, paid_entry=True),
            LeagueEntrant(entrant=entrant_2, league=league, paid_entry=True)
        ])
        h2h_league = HeadToHeadLeague.objects.create(league=league, fpl_league_id=1)
        manager_1 = Manager.objects.create(entrant=entrant_1, team_name='Team 1', fpl_manager_id=1, season=season)
        manager_2 = Manager.objects.create(entrant=entrant_2, team_name='Team 2', fpl_manager_id=2, season=season)
        gameweek = Gameweek.objects.create(number=1, start_date='2017-08-01', end_date='2017-08-03', season=season)
        # manager_2 outscores manager_1 in the gameweek (10 vs 0).
        ManagerPerformance.objects.create(manager=manager_1, gameweek=gameweek, score=0)
        ManagerPerformance.objects.create(manager=manager_2, gameweek=gameweek, score=10)
        h2h_match = HeadToHeadMatch.objects.create(fpl_match_id=1, h2h_league=h2h_league, gameweek=gameweek,
                                                   manager_1=manager_1, manager_2=manager_2)
        h2h_match.calculate_score()
        # One HeadToHeadPerformance per manager: 0 for the loser, 3 for
        # the winner.
        self.assertEqual(HeadToHeadPerformance.objects.count(), 2)
        manager_1_h2h_performance = HeadToHeadPerformance.objects.get(manager=manager_1, gameweek=gameweek,
                                                                      h2h_league=h2h_league)
        manager_2_h2h_performance = HeadToHeadPerformance.objects.get(manager=manager_2, gameweek=gameweek,
                                                                      h2h_league=h2h_league)
        self.assertEqual(manager_1_h2h_performance.score, 0)
        self.assertEqual(manager_2_h2h_performance.score, 3)
class ManagerTestCase(TestCase):
    """Tests for ``Manager.retrieve_performance_data``."""

    def setUp(self):
        # One manager with a recorded score only for gameweek 1; gameweek 2
        # exists but has no ManagerPerformance row yet.
        User = get_user_model()
        entrant_1 = User.objects.create(username='entrant_1')
        self.season = Season.objects.create(start_date='2017-08-01', end_date='2018-05-15')
        # Reload so string dates become date objects, as production code expects.
        self.season.refresh_from_db()
        manager_1 = Manager.objects.create(entrant=entrant_1, team_name='Team 1', fpl_manager_id=1, season=self.season)
        gameweek_1 = Gameweek.objects.create(number=1, start_date='2017-08-01', end_date='2017-08-03',
                                             season=self.season)
        gameweek_2 = Gameweek.objects.create(number=2, start_date='2017-08-08', end_date='2017-08-11',
                                             season=self.season)
        ManagerPerformance.objects.create(manager=manager_1, gameweek=gameweek_1, score=0)

    @patch('fpl.models.datetime')
    @patch('fpl.models.requests.get')
    def test_retrieve_performance_data(self, mock_requests_get, mock_datetime):
        """Scores are fetched from the API and stored net of transfer costs."""
        performance_data = {
            'history': [
                {
                    'event': 1,
                    'points': 10,
                    'event_transfers_cost': 0
                },
                {
                    'event': 2,
                    'points': 10,
                    'event_transfers_cost': 8
                }
            ]
        }
        mock_response = Mock()
        mock_response.json.return_value = performance_data
        mock_requests_get.return_value = mock_response
        # Freeze "today" inside the season so the refresh is not skipped, while
        # keeping timedelta arithmetic working on the patched module.
        mock_datetime.date.today.return_value = datetime.date(2018, 5, 10)
        mock_datetime.timedelta.side_effect = lambda *args, **kw: datetime.timedelta(*args, **kw)
        manager = Manager.objects.get()
        manager.retrieve_performance_data(self.season)
        self.assertEqual(ManagerPerformance.objects.count(), 2)
        # Gameweek 1: existing row updated to 10 points (no transfer cost).
        self.assertEqual(
            ManagerPerformance.objects.get(
                manager=manager,
                gameweek=Gameweek.objects.get(number=1)
            ).score,
            10
        )
        # Gameweek 2: 10 points minus an 8-point transfer hit.
        self.assertEqual(
            ManagerPerformance.objects.get(
                manager=manager,
                gameweek=Gameweek.objects.get(number=2)
            ).score,
            2
        )

    @patch('fpl.models.requests.get')
    def test_retrieve_league_performance_after_season_end_does_not_update(self, mock_requests_get):
        """No API request is made for a season that ended 14+ days ago."""
        performance_data = {
            'history': [
                {
                    'event': 1,
                    'points': 10,
                    'event_transfers_cost': 0
                },
                {
                    'event': 2,
                    'points': 10,
                    'event_transfers_cost': 8
                }
            ]
        }
        mock_response = Mock()
        mock_response.json.return_value = performance_data
        mock_requests_get.return_value = mock_response
        manager = Manager.objects.get()
        today = datetime.date.today()
        # Season ended exactly 14 days ago: past the refresh cut-off.
        season = Season.objects.create(start_date='2018-08-01', end_date=today - datetime.timedelta(days=14))
        manager.retrieve_performance_data(season)
        # Existing data is untouched and no HTTP call happened.
        self.assertEqual(ManagerPerformance.objects.count(), 1)
        self.assertEqual(
            ManagerPerformance.objects.get(
                manager=manager,
                gameweek=Gameweek.objects.get(number=1)
            ).score,
            0
        )
        mock_requests_get.assert_not_called()
class GameweekTestCase(TestCase):
    """Tests for ``Gameweek.retrieve_gameweek_data``."""

    @patch('fpl.models.datetime')
    @patch('fpl.models.requests.get')
    def test_retrieve_gameweek_data(self, mock_requests_get, mock_datetime):
        """Gameweeks are created/updated from the fixture and event feeds."""
        # Third fixture has no kickoff/event and must be ignored.
        fixture_data = [
            {
                "kickoff_time": "2017-08-11T18:45:00Z",
                "event": 1
            }, {
                "kickoff_time": "2017-08-12T11:30:00Z",
                "event": 2
            },
            {
                "kickoff_time": None,
                "event": None
            },
        ]
        gameweek_data = {
            'events': [
                {
                    'id': 1,
                    'deadline_time': '2017-08-11T17:45:00Z'
                },
                {
                    'id': 2,
                    'deadline_time': '2017-08-18T17:45:00Z'
                }
            ]
        }
        mock_response = Mock()
        # The model makes two GET requests; side_effect order matters:
        # fixtures are fetched first, then the events payload.
        mock_response.json.side_effect = [fixture_data, gameweek_data]
        mock_requests_get.return_value = mock_response
        # Freeze "today" inside the season so the refresh is not skipped.
        mock_datetime.date.today.return_value = datetime.date(2018, 5, 10)
        mock_datetime.timedelta.side_effect = lambda *args, **kw: datetime.timedelta(*args, **kw)
        season = Season.objects.create(start_date='2017-08-01', end_date='2018-05-13')
        season.refresh_from_db()
        # Pre-existing gameweek 1 should be updated in place, not duplicated.
        Gameweek.objects.create(number=1, start_date='2017-08-01', end_date='2017-08-03', season=season)
        Gameweek.retrieve_gameweek_data(season)
        self.assertEqual(Gameweek.objects.count(), 2)
        # Start dates come from the event deadline times.
        self.assertEqual(Gameweek.objects.get(number=1).start_date, datetime.date(2017, 8, 11))
        self.assertEqual(Gameweek.objects.get(number=2).start_date, datetime.date(2017, 8, 18))

    @patch('fpl.models.requests.get')
    @patch('fpl.models.timezone.now', side_effect=lambda: datetime.datetime.now())
    def test_retrieve_gameweek_data_does_nothing_after_season_end(self, mock_timezone_now, mock_requests_get):
        """Refreshing stops once the season ended 14+ days ago (13 days still refreshes)."""
        season = Season.objects.create(start_date='2017-08-01', end_date=timezone.now() - datetime.timedelta(days=13))
        season.refresh_from_db()
        Gameweek.retrieve_gameweek_data(season)
        # Within the grace window: both feeds are fetched.
        self.assertEqual(mock_requests_get.call_count, 2)
        mock_requests_get.reset_mock()
        season = Season.objects.create(start_date='2017-08-01', end_date=timezone.now() - datetime.timedelta(days=14))
        season.refresh_from_db()
        Gameweek.retrieve_gameweek_data(season)
        # Past the cut-off: no HTTP calls at all.
        mock_requests_get.assert_not_called()
class ClassicPayoutTestCase(TestCase):
    """Tests for ``ClassicPayout.calculate_winner``."""

    def setUp(self):
        # Three paid-up entrants. Cumulative scores after gameweeks 1-2:
        # manager_1 = 0, manager_2 = 20, manager_3 = 30.
        User = get_user_model()
        self.entrant_1 = User.objects.create(username='entrant_1')
        self.entrant_2 = User.objects.create(username='entrant_2')
        self.entrant_3 = User.objects.create(username='entrant_3')
        season = Season.objects.create(start_date='2017-08-01', end_date='2018-05-15')
        self.league = League.objects.create(name='Test League', entry_fee=10, season=season)
        LeagueEntrant.objects.bulk_create([
            LeagueEntrant(entrant=self.entrant_1, league=self.league, paid_entry=True),
            LeagueEntrant(entrant=self.entrant_2, league=self.league, paid_entry=True),
            LeagueEntrant(entrant=self.entrant_3, league=self.league, paid_entry=True)
        ])
        self.manager_1 = Manager.objects.create(entrant=self.entrant_1, team_name='Team 1', fpl_manager_id=1, season=season)
        self.manager_2 = Manager.objects.create(entrant=self.entrant_2, team_name='Team 2', fpl_manager_id=2, season=season)
        self.manager_3 = Manager.objects.create(entrant=self.entrant_3, team_name='Team 3', fpl_manager_id=3, season=season)
        gameweek_1 = Gameweek.objects.create(number=1, start_date='2017-08-01', end_date='2017-08-03', season=season)
        gameweek_2 = Gameweek.objects.create(number=2, start_date='2017-08-08', end_date='2017-08-11', season=season)
        ManagerPerformance.objects.bulk_create([
            ManagerPerformance(manager=self.manager_1, gameweek=gameweek_1, score=0),
            ManagerPerformance(manager=self.manager_2, gameweek=gameweek_1, score=10),
            ManagerPerformance(manager=self.manager_3, gameweek=gameweek_1, score=0),
            ManagerPerformance(manager=self.manager_1, gameweek=gameweek_2, score=0),
            ManagerPerformance(manager=self.manager_2, gameweek=gameweek_2, score=10),
            ManagerPerformance(manager=self.manager_3, gameweek=gameweek_2, score=30)
        ])

    def test_calculate_winner_single_winner(self):
        """The highest cumulative score within the payout window wins outright."""
        payout_1 = ClassicPayout.objects.create(
            league=self.league,
            name='Test Payout',
            amount=10,
            position=1,
            start_date='2017-08-01',
            end_date='2017-08-31',
            paid_out=False
        )
        # A later payout window exists but must not affect an untied result.
        ClassicPayout.objects.create(
            league=self.league,
            name='Test Payout',
            amount=10,
            position=1,
            start_date='2017-09-01',
            end_date='2017-09-30',
            paid_out=False
        )
        payout_1.calculate_winner()
        self.assertEqual(payout_1.winner, self.entrant_3)

    def test_calculate_winner_single_winner_tie_without_future_payout(self):
        """A tie with no later payout splits the amount across the tied entrants."""
        payout = ClassicPayout.objects.create(
            league=self.league,
            name='Test Payout',
            amount=10,
            position=1,
            start_date='2017-08-01',
            end_date='2017-08-31',
            paid_out=False
        )
        # Gameweek 3 scores engineer a three-way tie at 30 points each.
        gameweek_3 = Gameweek.objects.create(number=3, start_date='2017-08-15', end_date='2017-08-18',
                                             season=self.league.season)
        ManagerPerformance.objects.bulk_create([
            ManagerPerformance(manager=self.manager_1, gameweek=gameweek_3, score=30),
            ManagerPerformance(manager=self.manager_2, gameweek=gameweek_3, score=10),
            ManagerPerformance(manager=self.manager_3, gameweek=gameweek_3, score=0)
        ])
        payout.calculate_winner()
        # NOTE(review): relies on the queryset's implicit ordering — the split
        # payouts are expected back in 3.34/3.33/3.33 order.
        payout_1, payout_2, payout_3 = ClassicPayout.objects.all()
        self.assertEqual(ClassicPayout.objects.count(), 3)
        # 10 split three ways; the remainder penny goes to the first payout.
        self.assertEqual(payout_1.amount, decimal.Decimal('3.34'))
        self.assertEqual(payout_2.amount, decimal.Decimal('3.33'))
        self.assertEqual(payout_3.amount, decimal.Decimal('3.33'))

    def test_calculate_single_winner_tie_with_future_payout(self):
        """A tie rolls the amount into the next payout window instead of splitting."""
        payout_1 = ClassicPayout.objects.create(
            league=self.league,
            name='Test Payout 1',
            amount=10,
            position=1,
            start_date='2017-08-01',
            end_date='2017-08-31',
            paid_out=False
        )
        ClassicPayout.objects.create(
            league=self.league,
            name='Test Payout 2',
            amount=10,
            position=1,
            start_date='2017-09-01',
            end_date='2017-09-30',
            paid_out=False
        )
        # Gameweek 3 scores engineer a three-way tie at 30 points each.
        gameweek_3 = Gameweek.objects.create(number=3, start_date='2017-08-15', end_date='2017-08-18',
                                             season=self.league.season)
        ManagerPerformance.objects.bulk_create([
            ManagerPerformance(manager=self.manager_1, gameweek=gameweek_3, score=30),
            ManagerPerformance(manager=self.manager_2, gameweek=gameweek_3, score=10),
            ManagerPerformance(manager=self.manager_3, gameweek=gameweek_3, score=0)
        ])
        payout_1.calculate_winner()
        # The two payouts are merged into one, keeping the earlier start date
        # and combining both amounts.
        self.assertEqual(ClassicPayout.objects.count(), 1)
        payout = ClassicPayout.objects.get()
        self.assertEqual(payout.start_date, datetime.datetime.strptime(payout_1.start_date, '%Y-%m-%d').date())
        self.assertEqual(payout.amount, 20)

    def test_calculate_multiple_positions_tie(self):
        """A tie spanning positions with different payouts is not yet supported."""
        payout_1 = ClassicPayout.objects.create(
            league=self.league,
            name='Test Payout 1',
            amount=10,
            position=1,
            start_date='2017-08-01',
            end_date='2017-08-31',
            paid_out=False
        )
        payout_2 = ClassicPayout.objects.create(
            league=self.league,
            name='Test Payout 2',
            amount=10,
            position=2,
            start_date='2017-08-01',
            end_date='2017-08-31',
            paid_out=False
        )
        # Gameweek 3 leaves manager_1 and manager_2 tied behind manager_3.
        gameweek_3 = Gameweek.objects.create(number=3, start_date='2017-08-15', end_date='2017-08-18',
                                             season=self.league.season)
        ManagerPerformance.objects.bulk_create([
            ManagerPerformance(manager=self.manager_1, gameweek=gameweek_3, score=20),
            ManagerPerformance(manager=self.manager_2, gameweek=gameweek_3, score=0),
            ManagerPerformance(manager=self.manager_3, gameweek=gameweek_3, score=0)
        ])
        with self.assertRaises(NotImplementedError):
            payout_1.calculate_winner()
        with self.assertRaises(NotImplementedError):
            payout_2.calculate_winner()
class HeadToHeadPayoutTestCase(TestCase):
    """Tests for ``HeadToHeadPayout.calculate_winner``, mirroring ClassicPayoutTestCase."""

    def setUp(self):
        # Three paid-up entrants. Cumulative H2H scores after gameweeks 1-2:
        # manager_1 = 0, manager_2 = 2, manager_3 = 3.
        User = get_user_model()
        self.entrant_1 = User.objects.create(username='entrant_1')
        self.entrant_2 = User.objects.create(username='entrant_2')
        self.entrant_3 = User.objects.create(username='entrant_3')
        season = Season.objects.create(start_date='2017-08-01', end_date='2018-05-15')
        self.league = League.objects.create(name='Test League', entry_fee=10, season=season)
        LeagueEntrant.objects.bulk_create([
            LeagueEntrant(entrant=self.entrant_1, league=self.league, paid_entry=True),
            LeagueEntrant(entrant=self.entrant_2, league=self.league, paid_entry=True),
            LeagueEntrant(entrant=self.entrant_3, league=self.league, paid_entry=True)
        ])
        self.manager_1 = Manager.objects.create(entrant=self.entrant_1, team_name='Team 1', fpl_manager_id=1, season=season)
        self.manager_2 = Manager.objects.create(entrant=self.entrant_2, team_name='Team 2', fpl_manager_id=2, season=season)
        self.manager_3 = Manager.objects.create(entrant=self.entrant_3, team_name='Team 3', fpl_manager_id=3, season=season)
        gameweek_1 = Gameweek.objects.create(number=1, start_date='2017-08-01', end_date='2017-08-03', season=season)
        gameweek_2 = Gameweek.objects.create(number=2, start_date='2017-08-08', end_date='2017-08-11', season=season)
        self.h2h_league = HeadToHeadLeague.objects.create(league=self.league, fpl_league_id=1)
        HeadToHeadPerformance.objects.bulk_create([
            HeadToHeadPerformance(h2h_league=self.h2h_league, manager=self.manager_1, gameweek=gameweek_1, score=0),
            HeadToHeadPerformance(h2h_league=self.h2h_league, manager=self.manager_2, gameweek=gameweek_1, score=1),
            HeadToHeadPerformance(h2h_league=self.h2h_league, manager=self.manager_3, gameweek=gameweek_1, score=0),
            HeadToHeadPerformance(h2h_league=self.h2h_league, manager=self.manager_1, gameweek=gameweek_2, score=0),
            HeadToHeadPerformance(h2h_league=self.h2h_league, manager=self.manager_2, gameweek=gameweek_2, score=1),
            HeadToHeadPerformance(h2h_league=self.h2h_league, manager=self.manager_3, gameweek=gameweek_2, score=3)
        ])

    def test_calculate_winner_single_winner(self):
        """The highest cumulative H2H score within the payout window wins outright."""
        payout_1 = HeadToHeadPayout.objects.create(
            league=self.league,
            name='Test Payout',
            amount=10,
            position=1,
            start_date='2017-08-01',
            end_date='2017-08-31',
            paid_out=False
        )
        # A later payout window exists but must not affect an untied result.
        HeadToHeadPayout.objects.create(
            league=self.league,
            name='Test Payout',
            amount=10,
            position=1,
            start_date='2017-09-01',
            end_date='2017-09-30',
            paid_out=False
        )
        payout_1.calculate_winner()
        self.assertEqual(payout_1.winner, self.entrant_3)

    def test_calculate_winner_single_winner_tie_without_future_payout(self):
        """A tie with no later payout splits the amount across the tied entrants."""
        payout = HeadToHeadPayout.objects.create(
            league=self.league,
            name='Test Payout',
            amount=10,
            position=1,
            start_date='2017-08-01',
            end_date='2017-08-31',
            paid_out=False
        )
        # Gameweek 3 scores engineer a three-way tie at 3 points each.
        gameweek_3 = Gameweek.objects.create(number=3, start_date='2017-08-15', end_date='2017-08-18',
                                             season=self.league.season)
        HeadToHeadPerformance.objects.bulk_create([
            HeadToHeadPerformance(h2h_league=self.h2h_league, manager=self.manager_1, gameweek=gameweek_3, score=3),
            HeadToHeadPerformance(h2h_league=self.h2h_league, manager=self.manager_2, gameweek=gameweek_3, score=1),
            HeadToHeadPerformance(h2h_league=self.h2h_league, manager=self.manager_3, gameweek=gameweek_3, score=0)
        ])
        payout.calculate_winner()
        # NOTE(review): relies on the queryset's implicit ordering — the split
        # payouts are expected back in 3.34/3.33/3.33 order.
        payout_1, payout_2, payout_3 = HeadToHeadPayout.objects.all()
        self.assertEqual(HeadToHeadPayout.objects.count(), 3)
        # 10 split three ways; the remainder penny goes to the first payout.
        self.assertEqual(payout_1.amount, decimal.Decimal('3.34'))
        self.assertEqual(payout_2.amount, decimal.Decimal('3.33'))
        self.assertEqual(payout_3.amount, decimal.Decimal('3.33'))

    def test_calculate_single_winner_tie_with_future_payout(self):
        """A tie rolls the amount into the next payout window instead of splitting."""
        payout_1 = HeadToHeadPayout.objects.create(
            league=self.league,
            name='Test Payout 1',
            amount=10,
            position=1,
            start_date='2017-08-01',
            end_date='2017-08-31',
            paid_out=False
        )
        HeadToHeadPayout.objects.create(
            league=self.league,
            name='Test Payout 2',
            amount=10,
            position=1,
            start_date='2017-09-01',
            end_date='2017-09-30',
            paid_out=False
        )
        # Gameweek 3 scores engineer a three-way tie at 3 points each.
        gameweek_3 = Gameweek.objects.create(number=3, start_date='2017-08-15', end_date='2017-08-18',
                                             season=self.league.season)
        HeadToHeadPerformance.objects.bulk_create([
            HeadToHeadPerformance(h2h_league=self.h2h_league, manager=self.manager_1, gameweek=gameweek_3, score=3),
            HeadToHeadPerformance(h2h_league=self.h2h_league, manager=self.manager_2, gameweek=gameweek_3, score=1),
            HeadToHeadPerformance(h2h_league=self.h2h_league, manager=self.manager_3, gameweek=gameweek_3, score=0)
        ])
        payout_1.calculate_winner()
        # The two payouts are merged into one, keeping the earlier start date
        # and combining both amounts (same contract as the Classic variant).
        self.assertEqual(HeadToHeadPayout.objects.count(), 1)
        payout = HeadToHeadPayout.objects.get()
        self.assertEqual(payout.start_date, datetime.datetime.strptime(payout_1.start_date, '%Y-%m-%d').date())
        self.assertEqual(payout.amount, 20)

    def test_calculate_multiple_positions_tie(self):
        """A tie spanning positions with different payouts is not yet supported."""
        payout_1 = HeadToHeadPayout.objects.create(
            league=self.league,
            name='Test Payout 1',
            amount=10,
            position=1,
            start_date='2017-08-01',
            end_date='2017-08-31',
            paid_out=False
        )
        payout_2 = HeadToHeadPayout.objects.create(
            league=self.league,
            name='Test Payout 2',
            amount=10,
            position=2,
            start_date='2017-08-01',
            end_date='2017-08-31',
            paid_out=False
        )
        # Gameweek 3 leaves manager_1 and manager_2 tied behind manager_3.
        gameweek_3 = Gameweek.objects.create(number=3, start_date='2017-08-15', end_date='2017-08-18',
                                             season=self.league.season)
        HeadToHeadPerformance.objects.bulk_create([
            HeadToHeadPerformance(h2h_league=self.h2h_league, manager=self.manager_1, gameweek=gameweek_3, score=2),
            HeadToHeadPerformance(h2h_league=self.h2h_league, manager=self.manager_2, gameweek=gameweek_3, score=0),
            HeadToHeadPerformance(h2h_league=self.h2h_league, manager=self.manager_3, gameweek=gameweek_3, score=0)
        ])
        with self.assertRaises(NotImplementedError):
            payout_1.calculate_winner()
        with self.assertRaises(NotImplementedError):
            payout_2.calculate_winner()
class ClassicLeagueRefreshViewTestCase(TestCase):
    """Tests for the classic-league process-payouts view."""

    @patch('fpl.models.ClassicLeague.process_payouts')
    def test_get_redirect_url(self, mock_process_payouts):
        season = Season.objects.create(start_date='2017-08-01', end_date='2018-05-15')
        league_1 = League.objects.create(name='Test League 1', entry_fee=10, season=season)
        classic_league = ClassicLeague.objects.create(league=league_1, fpl_league_id=1)
        url = reverse('fpl:season:classic:process-payouts', args=[season.pk, classic_league.pk])
        # A league that has never been refreshed triggers payout processing.
        self.client.post(url)
        mock_process_payouts.assert_called_once()
        mock_process_payouts.reset_mock()
        # A just-updated league must not be reprocessed.
        classic_league.last_updated = timezone.now()
        classic_league.save()
        self.client.post(url)
        mock_process_payouts.assert_not_called()
class ClassicLeagueListViewTestCase(TestCase):
    def test_title(self):
        """A season without leagues still renders the listing page heading."""
        season = Season.objects.create(start_date='2017-08-01', end_date='2018-05-15')
        response = self.client.get(reverse('fpl:season:classic:list', args=[season.pk]))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'Classic Leagues')
        self.assertQuerysetEqual(response.context['league_list'], [])

    def test_classic_leagues_displayed(self):
        """Each classic league in the season appears in the listing."""
        season = Season.objects.create(start_date='2017-08-01', end_date='2018-05-15')
        for league_name, fpl_id in (('Test League 1', 1), ('Test League 2', 2)):
            league = League.objects.create(name=league_name, entry_fee=10, season=season)
            ClassicLeague.objects.create(league=league, fpl_league_id=fpl_id)
        response = self.client.get(reverse('fpl:season:classic:list', args=[season.pk]))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'Test League 1')
        self.assertContains(response, 'Test League 2')
        self.assertQuerysetEqual(
            response.context['league_list'].order_by('league'),
            ['<ClassicLeague: (2017-08-01 - 2018-05-15) - Test League 1>',
             '<ClassicLeague: (2017-08-01 - 2018-05-15) - Test League 2>'])
class ClassicLeagueDetailViewTestCase(TestCase):
    """Tests for the classic-league detail page."""

    def test_league_exists(self):
        """An unknown league pk returns 404."""
        response = self.client.get(reverse('fpl:season:classic:detail', args=[1, 1]))
        self.assertEqual(response.status_code, 404)

    def test_title(self):
        season = Season.objects.create(start_date='2017-08-01', end_date='2018-05-15')
        league_1 = League.objects.create(name='Test League 1', entry_fee=10, season=season)
        classic_league = ClassicLeague.objects.create(league=league_1, fpl_league_id=1)
        response = self.client.get(reverse('fpl:season:classic:detail', args=[season.pk, classic_league.pk]))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'Classic League: Test League 1')

    def test_entrants(self):
        """The standings table shows team, manager, paid flag and total score."""
        User = get_user_model()
        entrant_1 = User.objects.create(username='entrant_1', first_name='Test', last_name='User 1')
        entrant_2 = User.objects.create(username='entrant_2', first_name='Test', last_name='User 2')
        entrant_3 = User.objects.create(username='entrant_3', first_name='Test', last_name='User 3')
        season = Season.objects.create(start_date='2017-08-01', end_date='2018-05-13')
        league = League.objects.create(name='Test League', entry_fee=10, season=season)
        LeagueEntrant.objects.bulk_create([
            LeagueEntrant(entrant=entrant_1, league=league, paid_entry=False),
            LeagueEntrant(entrant=entrant_2, league=league, paid_entry=True),
            LeagueEntrant(entrant=entrant_3, league=league, paid_entry=True)
        ])
        classic_league = ClassicLeague.objects.create(league=league, fpl_league_id=1)
        manager_1 = Manager.objects.create(entrant=entrant_1, team_name='Team 1', fpl_manager_id=1, season=season)
        manager_2 = Manager.objects.create(entrant=entrant_2, team_name='Team 2', fpl_manager_id=2, season=season)
        manager_3 = Manager.objects.create(entrant=entrant_3, team_name='Team 3', fpl_manager_id=3, season=season)
        gameweek_1 = Gameweek.objects.create(number=1, start_date='2017-08-01', end_date='2017-08-03', season=season)
        gameweek_2 = Gameweek.objects.create(number=2, start_date='2017-08-08', end_date='2017-08-11', season=season)
        ManagerPerformance.objects.bulk_create([
            ManagerPerformance(manager=manager_1, gameweek=gameweek_1, score=0),
            ManagerPerformance(manager=manager_2, gameweek=gameweek_1, score=10),
            ManagerPerformance(manager=manager_3, gameweek=gameweek_1, score=0),
            ManagerPerformance(manager=manager_1, gameweek=gameweek_2, score=0),
            ManagerPerformance(manager=manager_2, gameweek=gameweek_2, score=10),
            ManagerPerformance(manager=manager_3, gameweek=gameweek_2, score=30)
        ])
        response = self.client.get(reverse('fpl:season:classic:detail', args=[season.pk, classic_league.pk]))
        self.assertQuerysetEqual(response.context['object'].managers.order_by('team_name'),
                                 ['<Manager: Team 1 - entrant_1>', '<Manager: Team 2 - entrant_2>',
                                  '<Manager: Team 3 - entrant_3>'])
        self.assertContains(response, 'Team')
        self.assertContains(response, 'Manager')
        self.assertContains(response, 'Entry Paid')
        self.assertContains(response, 'Score')
        # The classic detail page has no head-to-head column.
        self.assertNotContains(response, 'Head To Head Score')
        self.assertContains(response, 'Team 1')
        self.assertContains(response, 'Test User 1')
        self.assertContains(response, 'False')
        self.assertContains(response, 0)
        self.assertContains(response, 'Team 2')
        self.assertContains(response, 'Test User 2')
        self.assertContains(response, 'True')
        self.assertContains(response, 20)
        self.assertContains(response, 'Team 3')
        self.assertContains(response, 'Test User 3')
        self.assertContains(response, 30)

    def test_payouts(self):
        """The payouts table lists each payout with its window, winner and paid flag."""
        User = get_user_model()
        entrant_1 = User.objects.create(username='entrant_1', first_name='Test', last_name='User 1')
        entrant_2 = User.objects.create(username='entrant_2', first_name='Test', last_name='User 2')
        entrant_3 = User.objects.create(username='entrant_3', first_name='Test', last_name='User 3')
        season = Season.objects.create(start_date='2017-08-01', end_date='2018-05-15')
        league = League.objects.create(name='Test League', entry_fee=10, season=season)
        LeagueEntrant.objects.bulk_create([
            LeagueEntrant(entrant=entrant_1, league=league, paid_entry=False),
            LeagueEntrant(entrant=entrant_2, league=league, paid_entry=True),
            LeagueEntrant(entrant=entrant_3, league=league, paid_entry=True)
        ])
        classic_league = ClassicLeague.objects.create(league=league, fpl_league_id=1)
        gameweek_1 = Gameweek.objects.create(number=1, start_date='2017-08-01', end_date='2017-08-02', season=season)
        gameweek_2 = Gameweek.objects.create(number=2, start_date='2017-08-08', end_date='2017-08-09', season=season)
        # A gameweek outside both payout windows; must not appear in the table.
        Gameweek.objects.create(number=3, start_date='2017-08-15', end_date='2017-08-16', season=season)
        ClassicPayout.objects.create(
            league=classic_league.league,
            name='Test Payout 1',
            amount=10,
            position=1,
            start_date=gameweek_1.start_date,
            end_date=gameweek_1.end_date,
            winner=entrant_1,
            paid_out=True
        )
        ClassicPayout.objects.create(
            league=classic_league.league,
            name='Test Payout 2',
            amount=20,
            position=2,
            start_date=gameweek_2.start_date,
            end_date=gameweek_2.end_date,
            winner=entrant_2,
            paid_out=False
        )
        response = self.client.get(reverse('fpl:season:classic:detail', args=[season.pk, classic_league.pk]))
        self.assertQuerysetEqual(response.context['object'].league.payout_set.all().order_by('start_date'),
                                 ['<Payout: (2017-08-01 - 2018-05-15) - Test League - Test Payout 1 Position 1 (2017-08-01-2017-08-02): 10.00>',
                                  '<Payout: (2017-08-01 - 2018-05-15) - Test League - Test Payout 2 Position 2 (2017-08-08-2017-08-09): 20.00>'])
        self.assertContains(response, 'Name')
        self.assertContains(response, 'Position')
        self.assertContains(response, 'Start Date')
        self.assertContains(response, 'End Date')
        self.assertContains(response, 'Amount')
        self.assertContains(response, 'Winner')
        self.assertContains(response, 'Paid Out')
        self.assertContains(response, 'Test Payout 1')
        self.assertContains(response, '1')
        self.assertContains(response,
                            datetime.datetime.strptime(gameweek_1.start_date, '%Y-%m-%d').strftime('%b. %-d, %Y'))
        self.assertContains(response,
                            datetime.datetime.strptime(gameweek_1.end_date, '%Y-%m-%d').strftime('%b. %-d, %Y'))
        self.assertContains(response, '10.00')
        self.assertContains(response, 'Test User 1')
        self.assertContains(response, 'True')
        self.assertContains(response, 'Test Payout 2')
        # Position of payout 2 (was '1' — copy-paste error from the block above).
        self.assertContains(response, '2')
        self.assertContains(response,
                            datetime.datetime.strptime(gameweek_2.start_date, '%Y-%m-%d').strftime('%b. %-d, %Y'))
        self.assertContains(response,
                            datetime.datetime.strptime(gameweek_2.end_date, '%Y-%m-%d').strftime('%b. %-d, %Y'))
        self.assertContains(response, '20.00')
        self.assertContains(response, 'Test User 2')
        self.assertContains(response, 'False')

    def test_last_updated(self):
        """The page shows the refresh timestamp in Django's default format."""
        now = timezone.now()
        season = Season.objects.create(start_date='2017-08-01', end_date='2018-05-15')
        league = League.objects.create(name='Test League', entry_fee=10, season=season)
        classic_league = ClassicLeague.objects.create(league=league, fpl_league_id=1, last_updated=now)
        response = self.client.get(reverse('fpl:season:classic:detail', args=[season.pk, classic_league.pk]))
        self.assertContains(response, 'Last Updated')
        # Django renders AM/PM as 'a.m.'/'p.m.'; rebuild that suffix from strftime.
        ampm = ''.join([i.lower() + '.' for i in now.strftime('%p')])
        self.assertContains(response, now.strftime('%b. %-d, %Y, %-I:%M ' + ampm))
class HeadToHeadLeagueRefreshViewTestCase(TestCase):
    """Tests for the head-to-head process-payouts view."""

    @patch('fpl.models.HeadToHeadLeague.process_payouts')
    def test_get_redirect_url(self, mock_process_payouts):
        season = Season.objects.create(start_date='2017-08-01', end_date='2018-05-15')
        league_1 = League.objects.create(name='Test League 1', entry_fee=10, season=season)
        head_to_head_league = HeadToHeadLeague.objects.create(league=league_1, fpl_league_id=1)
        url = reverse('fpl:season:head-to-head:process-payouts', args=[season.pk, head_to_head_league.pk])
        # A league that has never been refreshed triggers payout processing.
        self.client.post(url)
        mock_process_payouts.assert_called_once()
        mock_process_payouts.reset_mock()
        # A just-updated league must not be reprocessed.
        head_to_head_league.last_updated = timezone.now()
        head_to_head_league.save()
        self.client.post(url)
        mock_process_payouts.assert_not_called()
class HeadToHeadLeagueListViewTestCase(TestCase):
    def test_title(self):
        """A season without leagues still renders the listing page heading."""
        season = Season.objects.create(start_date='2017-08-01', end_date='2018-05-15')
        response = self.client.get(reverse('fpl:season:head-to-head:list', args=[season.pk]))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'Head To Head Leagues')
        self.assertQuerysetEqual(response.context['league_list'], [])

    def test_head_to_head_leagues_displayed(self):
        """Each head-to-head league in the season appears in the listing."""
        season = Season.objects.create(start_date='2017-08-01', end_date='2018-05-15')
        for league_name, fpl_id in (('Test League 1', 1), ('Test League 2', 2)):
            league = League.objects.create(name=league_name, entry_fee=10, season=season)
            HeadToHeadLeague.objects.create(league=league, fpl_league_id=fpl_id)
        response = self.client.get(reverse('fpl:season:head-to-head:list', args=[season.pk]))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'Test League 1')
        self.assertContains(response, 'Test League 2')
        self.assertQuerysetEqual(
            response.context['league_list'].order_by('league'),
            ['<HeadToHeadLeague: (2017-08-01 - 2018-05-15) - Test League 1>',
             '<HeadToHeadLeague: (2017-08-01 - 2018-05-15) - Test League 2>'])
class HeadToHeadLeagueDetailViewTestCase(TestCase):
def test_league_exists(self):
season = Season.objects.create(start_date='2017-08-01', end_date='2018-05-13')
response = self.client.get(reverse('fpl:season:head-to-head:detail', args=[season.pk, 1]))
self.assertEqual(response.status_code, 404)
def test_title(self):
season = Season.objects.create(start_date='2017-08-01', end_date='2018-05-13')
league_1 = League.objects.create(name='Test League 1', entry_fee=10, season=season)
head_to_head_league = HeadToHeadLeague.objects.create(league=league_1, fpl_league_id=1)
response = self.client.get(reverse('fpl:season:head-to-head:detail', args=[season.pk, head_to_head_league.pk]))
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Head To Head League: Test League 1')
def test_entrants(self):
User = get_user_model()
entrant_1 = User.objects.create(username='entrant_1', first_name='Test', last_name='User 1')
entrant_2 = User.objects.create(username='entrant_2', first_name='Test', last_name='User 2')
entrant_3 = User.objects.create(username='entrant_3', first_name='Test', last_name='User 3')
season = Season.objects.create(start_date='2017-08-01', end_date='2018-05-13')
league = League.objects.create(name='Test League', entry_fee=10, season=season)
LeagueEntrant.objects.bulk_create([
LeagueEntrant(entrant=entrant_1, league=league, paid_entry=False),
LeagueEntrant(entrant=entrant_2, league=league, paid_entry=True),
LeagueEntrant(entrant=entrant_3, league=league, paid_entry=True)
])
head_to_head_league = HeadToHeadLeague.objects.create(league=league, fpl_league_id=1)
manager_1 = Manager.objects.create(entrant=entrant_1, team_name='Team 1', fpl_manager_id=1, season=season)
manager_2 = Manager.objects.create(entrant=entrant_2, team_name='Team 2', fpl_manager_id=2, season=season)
manager_3 = Manager.objects.create(entrant=entrant_3, team_name='Team 3', fpl_manager_id=3, season=season)
gameweek_1 = Gameweek.objects.create(number=1, start_date='2017-08-01', end_date='2017-08-03', season=season)
gameweek_2 = Gameweek.objects.create(number=2, start_date='2017-08-08', end_date='2017-08-11', season=season)
HeadToHeadPerformance.objects.bulk_create([
HeadToHeadPerformance(h2h_league=head_to_head_league, manager=manager_1, gameweek=gameweek_1, score=0),
HeadToHeadPerformance(h2h_league=head_to_head_league, manager=manager_2, gameweek=gameweek_1, score=1),
HeadToHeadPerformance(h2h_league=head_to_head_league, manager=manager_3, gameweek=gameweek_1, score=0),
HeadToHeadPerformance(h2h_league=head_to_head_league, manager=manager_1, gameweek=gameweek_2, score=0),
HeadToHeadPerformance(h2h_league=head_to_head_league, manager=manager_2, gameweek=gameweek_2, score=1),
HeadToHeadPerformance(h2h_league=head_to_head_league, manager=manager_3, gameweek=gameweek_2, score=3)
])
response = self.client.get(reverse('fpl:season:head-to-head:detail', args=[season.pk, head_to_head_league.pk]))
self.assertQuerysetEqual(sorted(response.context['object'].managers, key=lambda x: x.team_name),
['<Manager: Team 1 - entrant_1>', '<Manager: Team 2 - entrant_2>',
'<Manager: Team 3 - entrant_3>'])
self.assertContains(response, 'Team')
self.assertContains(response, 'Manager')
self.assertContains(response, 'Entry Paid')
self.assertContains(response, 'Score')
self.assertContains(response, 'Head To Head Score')
self.assertContains(response, 'Team 1')
self.assertContains(response, 'Test User 1')
self.assertContains(response, 'False')
self.assertContains(response, 0)
self.assertContains(response, 'Team 2')
self.assertContains(response, 'Test User 2')
self.assertContains(response, 'True')
self.assertContains(response, 2)
self.assertContains(response, 'Team 3')
self.assertContains(response, 'Test User 3')
self.assertContains(response, 3)
def test_payouts(self):
User = get_user_model()
entrant_1 = User.objects.create(username='entrant_1', first_name='Test', last_name='User 1')
entrant_2 = User.objects.create(username='entrant_2', first_name='Test', last_name='User 2')
entrant_3 = User.objects.create(username='entrant_3', first_name='Test', last_name='User 3')
season = Season.objects.create(start_date='2017-08-01', end_date='2018-05-13')
league = League.objects.create(name='Test League', entry_fee=10, season=season)
LeagueEntrant.objects.bulk_create([
LeagueEntrant(entrant=entrant_1, league=league, paid_entry=False),
LeagueEntrant(entrant=entrant_2, league=league, paid_entry=True),
LeagueEntrant(entrant=entrant_3, league=league, paid_entry=True)
])
head_to_head_league = HeadToHeadLeague.objects.create(league=league, fpl_league_id=1)
gameweek_1 = Gameweek.objects.create(number=1, start_date='2017-08-01', end_date='2017-08-02', season=season)
gameweek_2 = Gameweek.objects.create(number=2, start_date='2017-08-08', end_date='2017-08-09', season=season)
gameweek_3 = Gameweek.objects.create(number=3, start_date='2017-08-15', end_date='2017-08-16', season=season)
payout_1 = HeadToHeadPayout.objects.create(
league=head_to_head_league.league,
name='Test Payout 1',
amount=10,
position=1,
start_date=gameweek_1.start_date,
end_date=gameweek_1.end_date,
winner=entrant_1,
paid_out=True
)
payout_2 = HeadToHeadPayout.objects.create(
league=head_to_head_league.league,
name='Test Payout 2',
amount=20,
position=2,
start_date=gameweek_2.start_date,
end_date=gameweek_2.end_date,
winner=entrant_2,
paid_out=False
)
response = self.client.get(reverse('fpl:season:head-to-head:detail', args=[season.pk, head_to_head_league.pk]))
self.assertQuerysetEqual(response.context['object'].league.payout_set.all().order_by('start_date'),
['<Payout: (2017-08-01 - 2018-05-13) - Test League - Test Payout 1 Position 1 (2017-08-01-2017-08-02): 10.00>',
'<Payout: (2017-08-01 - 2018-05-13) - Test League - Test Payout 2 Position 2 (2017-08-08-2017-08-09): 20.00>'])
self.assertContains(response, 'Name')
self.assertContains(response, 'Position')
self.assertContains(response, 'Start Date')
self.assertContains(response, 'End Date')
self.assertContains(response, 'Amount')
self.assertContains(response, 'Winner')
self.assertContains(response, 'Paid Out')
self.assertContains(response, 'Test Payout 1')
self.assertContains(response, '1')
self.assertContains(response,
datetime.datetime.strptime(gameweek_1.start_date, '%Y-%m-%d').strftime('%b. %-d, %Y'))
self.assertContains(response,
datetime.datetime.strptime(gameweek_1.end_date, '%Y-%m-%d').strftime('%b. %-d, %Y'))
self.assertContains(response, '10.00')
self.assertContains(response, 'Test User 1')
self.assertContains(response, 'True')
self.assertContains(response, 'Test Payout 2')
self.assertContains(response, '1')
self.assertContains(response,
datetime.datetime.strptime(gameweek_2.start_date, '%Y-%m-%d').strftime('%b. %-d, %Y'))
self.assertContains(response,
datetime.datetime.strptime(gameweek_2.end_date, '%Y-%m-%d').strftime('%b. %-d, %Y'))
self.assertContains(response, '20.00')
self.assertContains(response, 'Test User 2')
self.assertContains(response, 'False')
def test_last_updated(self):
    """The detail page should render the league's last-updated timestamp."""
    updated_at = timezone.now()
    season = Season.objects.create(start_date='2017-08-01', end_date='2018-05-13')
    league = League.objects.create(name='Test League', entry_fee=10, season=season)
    h2h_league = HeadToHeadLeague.objects.create(league=league, fpl_league_id=1, last_updated=updated_at)
    detail_url = reverse('fpl:season:head-to-head:detail', args=[season.pk, h2h_league.pk])
    response = self.client.get(detail_url)
    self.assertContains(response, 'Last Updated')
    # Django renders AM/PM as 'a.m.'/'p.m.'; rebuild that suffix from strftime('%p').
    suffix = ''.join(ch.lower() + '.' for ch in updated_at.strftime('%p'))
    self.assertContains(response, updated_at.strftime('%b. %-d, %Y, %-I:%M ' + suffix))
| 50.767142
| 164
| 0.638593
| 8,828
| 74,780
| 5.185773
| 0.026846
| 0.065312
| 0.02512
| 0.022608
| 0.945216
| 0.926868
| 0.909196
| 0.8955
| 0.881586
| 0.847182
| 0
| 0.054684
| 0.246323
| 74,780
| 1,472
| 165
| 50.80163
| 0.757594
| 0
| 0
| 0.716452
| 0
| 0.006065
| 0.100789
| 0.020259
| 0
| 0
| 0
| 0
| 0.168309
| 1
| 0.0326
| false
| 0
| 0.006823
| 0
| 0.04928
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4da6880e573cd010b2c72a8ee93a5a512c532ba3
| 109
|
py
|
Python
|
creds.py
|
feelqah/LinkedinCompanyScraper
|
d28357ebc85a83a1a695fd5ddac95a5a5160a59a
|
[
"Unlicense"
] | null | null | null |
creds.py
|
feelqah/LinkedinCompanyScraper
|
d28357ebc85a83a1a695fd5ddac95a5a5160a59a
|
[
"Unlicense"
] | null | null | null |
creds.py
|
feelqah/LinkedinCompanyScraper
|
d28357ebc85a83a1a695fd5ddac95a5a5160a59a
|
[
"Unlicense"
] | null | null | null |
# LinkedIn login credentials used by the scraper.
# NOTE(review): keep real values out of version control — prefer environment
# variables or a git-ignored local settings file.
linkedin_email = ''  # place your linkedin login email
linkedin_password = ''  # place your linkedin login password
| 54.5
| 57
| 0.807339
| 14
| 109
| 6.142857
| 0.428571
| 0.209302
| 0.395349
| 0.511628
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.155963
| 109
| 2
| 57
| 54.5
| 0.934783
| 0.605505
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.5
| 0
| null | null | 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
4dc30b7dd02381a519093b97d2b44e73bfb87d54
| 383
|
py
|
Python
|
App/models.py
|
flaskxiangmu/taopiaopiao
|
18d5a0a41afd7978161ba1fefa5f0d55da352967
|
[
"Apache-2.0"
] | null | null | null |
App/models.py
|
flaskxiangmu/taopiaopiao
|
18d5a0a41afd7978161ba1fefa5f0d55da352967
|
[
"Apache-2.0"
] | null | null | null |
App/models.py
|
flaskxiangmu/taopiaopiao
|
18d5a0a41afd7978161ba1fefa5f0d55da352967
|
[
"Apache-2.0"
] | null | null | null |
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
class Hello(db.Model):
    """Minimal demo model with a name and an age."""
    # Auto-incrementing integer surrogate primary key.
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    # Name, up to 64 characters.
    name = db.Column(db.String(64))
    # Age as an integer; presumably years — confirm with callers.
    age = db.Column(db.Integer)
class Cat(db.Model):
    """Cat model; its columns are identical to Hello's (id/name/age)."""
    # Auto-incrementing integer surrogate primary key.
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    # Name, up to 64 characters.
    name = db.Column(db.String(64))
    # Age as an integer; presumably years — confirm with callers.
    age = db.Column(db.Integer)
| 21.277778
| 68
| 0.694517
| 57
| 383
| 4.614035
| 0.350877
| 0.18251
| 0.228137
| 0.258555
| 0.752852
| 0.752852
| 0.752852
| 0.752852
| 0.752852
| 0.752852
| 0
| 0.012539
| 0.167102
| 383
| 17
| 69
| 22.529412
| 0.811912
| 0
| 0
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.1
| 0
| 0.9
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
1517c49490effd48bff6d47667de07fb5ad63e02
| 107
|
py
|
Python
|
backend/common/context_processors.py
|
cody-robertson/django-react-messaging
|
a58d42292000bfac106f4f85a7fd5ee2592d29a6
|
[
"MIT"
] | null | null | null |
backend/common/context_processors.py
|
cody-robertson/django-react-messaging
|
a58d42292000bfac106f4f85a7fd5ee2592d29a6
|
[
"MIT"
] | 2
|
2021-04-28T16:15:49.000Z
|
2021-04-28T16:15:59.000Z
|
backend/common/context_processors.py
|
cody-robertson/django-react-messaging
|
a58d42292000bfac106f4f85a7fd5ee2592d29a6
|
[
"MIT"
] | null | null | null |
from django.conf import settings
def commit_sha(request):
    """Context processor exposing the deployed commit SHA to templates.

    Returns a dict with the single key "COMMIT_SHA" read from Django settings;
    the request argument is required by the context-processor protocol but unused.
    """
    context = {"COMMIT_SHA": settings.COMMIT_SHA}
    return context
| 17.833333
| 46
| 0.766355
| 15
| 107
| 5.266667
| 0.666667
| 0.341772
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140187
| 107
| 5
| 47
| 21.4
| 0.858696
| 0
| 0
| 0
| 0
| 0
| 0.093458
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
129f47524384169c51cf11225a0501fb7e6b4037
| 26,551
|
py
|
Python
|
cotidia/fabfile.py
|
guillaumepiot/cotidia-crm
|
9f46d3b4fb1bc84f553c649e121b945e68a0bdab
|
[
"BSD-3-Clause"
] | null | null | null |
cotidia/fabfile.py
|
guillaumepiot/cotidia-crm
|
9f46d3b4fb1bc84f553c649e121b945e68a0bdab
|
[
"BSD-3-Clause"
] | null | null | null |
cotidia/fabfile.py
|
guillaumepiot/cotidia-crm
|
9f46d3b4fb1bc84f553c649e121b945e68a0bdab
|
[
"BSD-3-Clause"
] | null | null | null |
# Run as local
import sys, random
from fabric.api import *
from os.path import exists
def add_module():
    """Scaffold a new module (model) inside an existing Django app.

    Prompts for an existing app name and a new model class name, then fetches
    boilerplate from public gists for tests, URLs, views, forms and admin
    templates, and rewrites the generic ``model``/``Model`` placeholders with
    the new module's name via ``sed``. Missing package directories and
    ``__init__.py`` files are created on the way. All shell commands run with
    ``warn_only`` so a failed probe (``cd``/``cat``) triggers creation instead
    of aborting.

    Fix vs. the original: the ``model-admin`` sed in the tests section was run
    twice verbatim; the duplicate invocation has been removed (sed -i is
    idempotent here, so behaviour is unchanged).
    """
    prompt('Specify exiting app name:', 'app_name', validate=r'^[a-z\_]+$')
    prompt('Specify new module name (model class name):', 'module_name', validate=r'^[a-zA-Z\_]+$')
    with settings(warn_only=True):
        # Probe that the app folder exists; warn the user if it does not.
        if local("cd %s" % env.app_name).failed:
            print("!!! App name does not exist !!! - Use python manage.py startapp to create a new app")
        #
        # Add the tests
        #
        if local("cd %s/tests" % env.app_name).failed:
            local("mkdir %s/tests"% env.app_name)
        if local('echo "from .admin import *\nfrom .public import *" > %s/tests/__init__.py' % env.app_name).failed:
            local("touch %s/tests/__init__.py"% env.app_name)
        if local("cd %s/tests/admin" % env.app_name).failed:
            local("mkdir %s/tests/admin"% env.app_name)
        if local("cat %s/tests/admin/__init__.py" % env.app_name).failed:
            local("touch %s/tests/admin/__init__.py"% env.app_name)
        if local("cd %s/tests/public" % env.app_name).failed:
            local("mkdir %s/tests/public"% env.app_name)
        if not exists("%s/tests/public/__init__.py" % env.app_name):
            local("touch %s/tests/public/__init__.py"% env.app_name)
        if not exists("%s/tests/admin/%s.py" % (env.app_name, env.module_name.lower())):
            local('curl https://gist.githubusercontent.com/guillaumepiot/b264c44696663678dc89/raw/tests.py > %s/tests/admin/%s.py' % (env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/from app.models import Model/from %s.models import %s/g' %s/tests/admin/%s.py" % (env.app_name, env.module_name, env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/ModelTests/%sTests/g' %s/tests/admin/%s.py" % (env.module_name, env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/_model/_%s/g' %s/tests/admin/%s.py" % (env.module_name.lower(), env.app_name, env.module_name.lower()))
            # BUGFIX: this substitution appeared twice in the original; once suffices.
            local("sed -i .bak 's/model-admin/%s-admin/g' %s/tests/admin/%s.py" % (env.app_name, env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/model-add/%s-add/g' %s/tests/admin/%s.py" % (env.module_name.lower(), env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/model-update/%s-update/g' %s/tests/admin/%s.py" % (env.module_name.lower(), env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/model-detail/%s-detail/g' %s/tests/admin/%s.py" % (env.module_name.lower(), env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/model-list/%s-list/g' %s/tests/admin/%s.py" % (env.module_name.lower(), env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/model-delete/%s-delete/g' %s/tests/admin/%s.py" % (env.module_name.lower(), env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/Model./%s./g' %s/tests/admin/%s.py" % (env.module_name, env.app_name, env.module_name.lower()))
            local("rm %s/tests/admin/%s.py.bak" % (env.app_name, env.module_name.lower()))
        #
        # Add the URLs
        #
        if local("cd %s/urls" % env.app_name).failed:
            local("mkdir %s/urls"% env.app_name)
        if local("cat %s/urls/__init__.py" % env.app_name).failed:
            local("touch %s/urls/__init__.py"% env.app_name)
        if local("cd %s/urls/admin" % env.app_name).failed:
            local("mkdir %s/urls/admin"% env.app_name)
        if local("cat %s/urls/admin/__init__.py" % env.app_name).failed:
            local("touch %s/urls/admin/__init__.py"% env.app_name)
        if local("cd %s/urls/public" % env.app_name).failed:
            local("mkdir %s/urls/public"% env.app_name)
        if not exists("%s/urls/public/__init__.py" % env.app_name):
            local("touch %s/urls/public/__init__.py"% env.app_name)
        if not exists("%s/urls/admin/%s.py" % (env.app_name, env.module_name.lower())):
            local('curl https://gist.githubusercontent.com/guillaumepiot/b264c44696663678dc89/raw/urls.py > %s/urls/admin/%s.py' % (env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/from app.views.admin import/from %s.views.admin.%s import/g' %s/urls/admin/%s.py" % (env.app_name, env.module_name.lower(), env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/model-add/%s-add/g' %s/urls/admin/%s.py" % (env.module_name.lower(), env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/model-update/%s-update/g' %s/urls/admin/%s.py" % (env.module_name.lower(), env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/model-detail/%s-detail/g' %s/urls/admin/%s.py" % (env.module_name.lower(), env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/model-list/%s-list/g' %s/urls/admin/%s.py" % (env.module_name.lower(), env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/model-delete/%s-delete/g' %s/urls/admin/%s.py" % (env.module_name.lower(), env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/ModelCreate/%sCreate/g' %s/urls/admin/%s.py" % (env.module_name, env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/ModelUpdate/%sUpdate/g' %s/urls/admin/%s.py" % (env.module_name, env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/ModelDetail/%sDetail/g' %s/urls/admin/%s.py" % (env.module_name, env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/ModelList/%sList/g' %s/urls/admin/%s.py" % (env.module_name, env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/ModelDelete/%sDelete/g' %s/urls/admin/%s.py" % (env.module_name, env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/Model management/%s management/g' %s/urls/admin/%s.py" % (env.module_name, env.app_name, env.module_name.lower()))
            local("rm %s/urls/admin/%s.py.bak" % (env.app_name, env.module_name.lower()))
        #
        # Add the Views
        #
        if local("cd %s/views" % env.app_name).failed:
            local("mkdir %s/views"% env.app_name)
        if local("cat %s/views/__init__.py" % env.app_name).failed:
            local("touch %s/views/__init__.py"% env.app_name)
        if local("cd %s/views/admin" % env.app_name).failed:
            local("mkdir %s/views/admin"% env.app_name)
        if local("cat %s/views/admin/__init__.py" % env.app_name).failed:
            local("touch %s/views/admin/__init__.py"% env.app_name)
        if local("cd %s/views/public" % env.app_name).failed:
            local("mkdir %s/views/public"% env.app_name)
        if not exists("%s/views/public/__init__.py" % env.app_name):
            local("touch %s/views/public/__init__.py"% env.app_name)
        if not exists("%s/views/admin/%s.py" % (env.app_name, env.module_name.lower())):
            local('curl https://gist.githubusercontent.com/guillaumepiot/b264c44696663678dc89/raw/views.py > %s/views/admin/%s.py' % (env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/from app.models import Model/from %s.models import %s/g' %s/views/admin/%s.py" % (env.app_name, env.module_name, env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/from app.forms.admin.model/from %s.forms.admin.%s/g' %s/views/admin/%s.py" % (env.app_name, env.module_name.lower(), env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/ModelAddForm/%sAddForm/g' %s/views/admin/%s.py" % (env.module_name, env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/model-add/%s-add/g' %s/views/admin/%s.py" % (env.module_name.lower(), env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/model-update/%s-update/g' %s/views/admin/%s.py" % (env.module_name.lower(), env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/model-detail/%s-detail/g' %s/views/admin/%s.py" % (env.module_name.lower(), env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/model-list/%s-list/g' %s/views/admin/%s.py" % (env.module_name.lower(), env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/model-delete/%s-delete/g' %s/views/admin/%s.py" % (env.module_name.lower(), env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/model-admin/%s-admin/g' %s/views/admin/%s.py" % (env.app_name, env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/ModelCreate/%sCreate/g' %s/views/admin/%s.py" % (env.module_name, env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/ModelUpdate/%sUpdate/g' %s/views/admin/%s.py" % (env.module_name, env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/ModelDetail/%sDetail/g' %s/views/admin/%s.py" % (env.module_name, env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/ModelList/%sList/g' %s/views/admin/%s.py" % (env.module_name, env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/ModelDelete/%sDelete/g' %s/views/admin/%s.py" % (env.module_name, env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/Model management/%s management/g' %s/views/admin/%s.py" % (env.module_name, env.app_name, env.module_name.lower()))
            # Broad 'Model' replacement must run after the more specific ones above.
            local("sed -i .bak 's/Model/%s/g' %s/views/admin/%s.py" % (env.module_name, env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/admin\/app\/model/admin\/%s\/%s/g' %s/views/admin/%s.py" % (env.app_name, env.module_name.lower(), env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/model_list/%s_list/g' %s/views/admin/%s.py" % (env.module_name.lower(), env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/model_detail/%s_detail/g' %s/views/admin/%s.py" % (env.module_name.lower(), env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/model_form/%s_form/g' %s/views/admin/%s.py" % (env.module_name.lower(), env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/model_confirm_delete/%s_confirm_delete/g' %s/views/admin/%s.py" % (env.module_name.lower(), env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/app.change_model/%s.change_%s/g' %s/views/admin/%s.py" % (env.app_name, env.module_name.lower(), env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/app.add_model/%s.add_%s/g' %s/views/admin/%s.py" % (env.app_name, env.module_name.lower(), env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/app.delete_model/%s.delete_%s/g' %s/views/admin/%s.py" % (env.app_name, env.module_name.lower(), env.app_name, env.module_name.lower()))
            local("rm %s/views/admin/%s.py.bak" % (env.app_name, env.module_name.lower()))
        #
        # Add the Forms
        #
        if local("cd %s/forms" % env.app_name).failed:
            local("mkdir %s/forms"% env.app_name)
        if local("cat %s/forms/__init__.py" % env.app_name).failed:
            local("touch %s/forms/__init__.py"% env.app_name)
        if local("cd %s/forms/admin" % env.app_name).failed:
            local("mkdir %s/forms/admin"% env.app_name)
        if local("cat %s/forms/admin/__init__.py" % env.app_name).failed:
            local("touch %s/forms/admin/__init__.py"% env.app_name)
        if local("cd %s/forms/public" % env.app_name).failed:
            local("mkdir %s/forms/public"% env.app_name)
        if not exists("%s/forms/public/__init__.py" % env.app_name):
            local("touch %s/forms/public/__init__.py"% env.app_name)
        if not exists("%s/forms/admin/%s.py" % (env.app_name, env.module_name.lower())):
            local('curl https://gist.githubusercontent.com/guillaumepiot/b264c44696663678dc89/raw/forms.py > %s/forms/admin/%s.py' % (env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/from app.models import Model/from %s.models import %s/g' %s/forms/admin/%s.py" % (env.app_name, env.module_name, env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/MODEL FORMS/%s FORMS/g' %s/forms/admin/%s.py" % (env.module_name.upper(), env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/model = Model/model = %s/g' %s/forms/admin/%s.py" % (env.module_name, env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/Model\./%s\./g' %s/forms/admin/%s.py" % (env.module_name, env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/ModelAddForm/%sAddForm/g' %s/forms/admin/%s.py" % (env.module_name, env.app_name, env.module_name.lower()))
            local("sed -i .bak 's/ModelUpdateForm/%sUpdateForm/g' %s/forms/admin/%s.py" % (env.module_name, env.app_name, env.module_name.lower()))
            local("rm %s/forms/admin/%s.py.bak" % (env.app_name, env.module_name.lower()))
        #
        # Add the Templates
        #
        if local("cd %s/templates" % env.app_name).failed:
            local("mkdir %s/templates"% env.app_name)
        if local("cd %s/templates/admin" % env.app_name).failed:
            local("mkdir %s/templates/admin"% env.app_name)
        if local("cd %s/templates/admin/%s" % (env.app_name, env.app_name)).failed:
            local("mkdir %s/templates/admin/%s"% (env.app_name, env.app_name))
        if local("cd %s/templates/admin/%s/%s" % (env.app_name, env.app_name, env.module_name.lower())).failed:
            local("mkdir %s/templates/admin/%s/%s"% (env.app_name, env.app_name, env.module_name.lower()))
        if not exists("%s/templates/admin/%s/%s/%s_form.html" % (env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower())):
            local('curl https://gist.githubusercontent.com/guillaumepiot/b264c44696663678dc89/raw/model_form.html > %s/templates/admin/%s/%s/%s_form.html' % (env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/Edit Model/Edit %s/g' %s/templates/admin/%s/%s/%s_form.html" % (env.module_name, env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/Add a Model/Add a %s/g' %s/templates/admin/%s/%s/%s_form.html" % (env.module_name, env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/trans \"Models\"/trans \"%s\"/g' %s/templates/admin/%s/%s/%s_form.html" % (env.module_name, env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/trans \"Model detail\"/trans \"%s detail\"/g' %s/templates/admin/%s/%s/%s_form.html" % (env.module_name, env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/model-add/%s-add/g' %s/templates/admin/%s/%s/%s_form.html" % (env.module_name.lower(), env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/model-update/%s-update/g' %s/templates/admin/%s/%s/%s_form.html" % (env.module_name.lower(), env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/model-detail/%s-detail/g' %s/templates/admin/%s/%s/%s_form.html" % (env.module_name.lower(), env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/model-list/%s-list/g' %s/templates/admin/%s/%s/%s_form.html" % (env.module_name.lower(), env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/model-delete/%s-delete/g' %s/templates/admin/%s/%s/%s_form.html" % (env.module_name.lower(), env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/model-admin/%s-admin/g' %s/templates/admin/%s/%s/%s_form.html" % (env.app_name, env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("rm %s/templates/admin/%s/%s/%s_form.html.bak" % (env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
        if not exists("%s/templates/admin/%s/%s/%s_list.html" % (env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower())):
            local('curl https://gist.githubusercontent.com/guillaumepiot/b264c44696663678dc89/raw/model_list.html > %s/templates/admin/%s/%s/%s_list.html' % (env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/trans \"Models\"/trans \"%s\"/g' %s/templates/admin/%s/%s/%s_list.html" % (env.module_name, env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/trans \"Add a model\"/trans \"Add a %s\"/g' %s/templates/admin/%s/%s/%s_list.html" % (env.module_name, env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/model-add/%s-add/g' %s/templates/admin/%s/%s/%s_list.html" % (env.module_name.lower(), env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/model-update/%s-update/g' %s/templates/admin/%s/%s/%s_list.html" % (env.module_name.lower(), env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/model-detail/%s-detail/g' %s/templates/admin/%s/%s/%s_list.html" % (env.module_name.lower(), env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/model-list/%s-list/g' %s/templates/admin/%s/%s/%s_list.html" % (env.module_name.lower(), env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/model-delete/%s-delete/g' %s/templates/admin/%s/%s/%s_list.html" % (env.module_name.lower(), env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/model-admin/%s-admin/g' %s/templates/admin/%s/%s/%s_list.html" % (env.app_name, env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/perms.app.add_model/perms.%s.add_%s/g' %s/templates/admin/%s/%s/%s_list.html" % (env.app_name, env.module_name.lower(), env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/Add a Model/Add a %s/g' %s/templates/admin/%s/%s/%s_list.html" % (env.module_name, env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("rm %s/templates/admin/%s/%s/%s_list.html.bak" % (env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
        if not exists("%s/templates/admin/%s/%s/%s_detail.html" % (env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower())):
            local('curl https://gist.githubusercontent.com/guillaumepiot/b264c44696663678dc89/raw/model_detail.html > %s/templates/admin/%s/%s/%s_detail.html' % (env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/trans \"Models\"/trans \"%s\"/g' %s/templates/admin/%s/%s/%s_detail.html" % (env.module_name, env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/Edit Model/Edit %s/g' %s/templates/admin/%s/%s/%s_detail.html" % (env.module_name, env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/Model details/%s details/g' %s/templates/admin/%s/%s/%s_detail.html" % (env.module_name, env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/Delete Model/Delete %s/g' %s/templates/admin/%s/%s/%s_detail.html" % (env.module_name, env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/trans \"Model details\"/trans \"%s details\"/g' %s/templates/admin/%s/%s/%s_detail.html" % (env.module_name, env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/trans \"Edit model\"/trans \"Edit %s\"/g' %s/templates/admin/%s/%s/%s_detail.html" % (env.module_name, env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/model-add/%s-add/g' %s/templates/admin/%s/%s/%s_detail.html" % (env.module_name.lower(), env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/model-update/%s-update/g' %s/templates/admin/%s/%s/%s_detail.html" % (env.module_name.lower(), env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/model-detail/%s-detail/g' %s/templates/admin/%s/%s/%s_detail.html" % (env.module_name.lower(), env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/model-list/%s-list/g' %s/templates/admin/%s/%s/%s_detail.html" % (env.module_name.lower(), env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/model-delete/%s-delete/g' %s/templates/admin/%s/%s/%s_detail.html" % (env.module_name.lower(), env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/model-admin/%s-admin/g' %s/templates/admin/%s/%s/%s_detail.html" % (env.app_name, env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/perms.app.delete_model/perms.%s.delete_%s/g' %s/templates/admin/%s/%s/%s_detail.html" % (env.app_name, env.module_name.lower(), env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/perms.app.change_model/perms.%s.change_%s/g' %s/templates/admin/%s/%s/%s_detail.html" % (env.app_name, env.module_name.lower(), env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("rm %s/templates/admin/%s/%s/%s_detail.html.bak" % (env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
        if not exists("%s/templates/admin/%s/%s/%s_confirm_delete.html" % (env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower())):
            local('curl https://gist.githubusercontent.com/guillaumepiot/b264c44696663678dc89/raw/model_confirm_delete.html > %s/templates/admin/%s/%s/%s_confirm_delete.html' % (env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/trans \"Models\"/trans \"%s\"/g' %s/templates/admin/%s/%s/%s_confirm_delete.html" % (env.module_name, env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/trans \"Model\"/trans \"%s\"/g' %s/templates/admin/%s/%s/%s_confirm_delete.html" % (env.module_name, env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/trans \"Delete model\"/trans \"Delete %s\"/g' %s/templates/admin/%s/%s/%s_confirm_delete.html" % (env.module_name, env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/Delete Model/Delete %s/g' %s/templates/admin/%s/%s/%s_confirm_delete.html" % (env.module_name, env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/model-add/%s-add/g' %s/templates/admin/%s/%s/%s_confirm_delete.html" % (env.module_name.lower(), env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/model-update/%s-update/g' %s/templates/admin/%s/%s/%s_confirm_delete.html" % (env.module_name.lower(), env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/model-detail/%s-detail/g' %s/templates/admin/%s/%s/%s_confirm_delete.html" % (env.module_name.lower(), env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/model-list/%s-list/g' %s/templates/admin/%s/%s/%s_confirm_delete.html" % (env.module_name.lower(), env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/model-delete/%s-delete/g' %s/templates/admin/%s/%s/%s_confirm_delete.html" % (env.module_name.lower(), env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/model-admin/%s-admin/g' %s/templates/admin/%s/%s/%s_confirm_delete.html" % (env.app_name, env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("sed -i .bak 's/following model/following %s/g' %s/templates/admin/%s/%s/%s_confirm_delete.html" % (env.module_name.lower(), env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
            local("rm %s/templates/admin/%s/%s/%s_confirm_delete.html.bak" % (env.app_name, env.app_name, env.module_name.lower(), env.module_name.lower()))
| 64.13285
| 176
| 0.618809
| 4,237
| 26,551
| 3.704744
| 0.026434
| 0.174556
| 0.225266
| 0.264891
| 0.949162
| 0.939989
| 0.939989
| 0.937504
| 0.884946
| 0.868128
| 0
| 0.0059
| 0.182893
| 26,551
| 413
| 177
| 64.288136
| 0.717631
| 0.004181
| 0
| 0.373737
| 0
| 0.326599
| 0.37605
| 0.180872
| 0
| 0
| 0
| 0
| 0
| 1
| 0.003367
| true
| 0
| 0.026936
| 0
| 0.030303
| 0.003367
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
12c7a672f93542c40637bcf90c5ccfecca68379d
| 2,996
|
py
|
Python
|
tests/testapp/tests/test_syncing_models.py
|
jredrejo/morango
|
c3ec2554b026f65ac5f0fc5c9d439277fbac14f9
|
[
"MIT"
] | null | null | null |
tests/testapp/tests/test_syncing_models.py
|
jredrejo/morango
|
c3ec2554b026f65ac5f0fc5c9d439277fbac14f9
|
[
"MIT"
] | 6
|
2020-05-13T07:50:37.000Z
|
2020-06-04T18:23:03.000Z
|
tests/testapp/tests/test_syncing_models.py
|
jredrejo/morango
|
c3ec2554b026f65ac5f0fc5c9d439277fbac14f9
|
[
"MIT"
] | null | null | null |
from django.test import TestCase
from facility_profile.models import MyUser
from morango.manager import SyncableModelManager
from morango.query import SyncableModelQuerySet
class SyncingModelsTestCase(TestCase):
    """Exercise the syncable manager/queryset plumbing on MyUser.

    Covers manager/queryset class inheritance and how save()/update()
    affect the _morango_dirty_bit flag for each value of the
    update_dirty_bit_to argument (True/False force it, None leaves it
    unchanged, omitting it marks the record dirty).
    """

    def setUp(self):
        # Single fixture row that every test inspects.
        MyUser.objects.create(username='beans')

    def _dirty_bit(self):
        # Re-fetch the user so the persisted flag is observed, not a stale instance.
        return MyUser.objects.first()._morango_dirty_bit

    def test_syncable_manager_inheritance(self):
        self.assertIsInstance(MyUser.objects, SyncableModelManager)

    def test_syncable_qs_inheritance(self):
        self.assertIsInstance(MyUser.objects.all(), SyncableModelQuerySet)

    def test_syncable_manager_update(self):
        self.assertTrue(self._dirty_bit())
        MyUser.objects.update(update_dirty_bit_to=False)
        self.assertFalse(self._dirty_bit())
        MyUser.objects.update(update_dirty_bit_to=None)
        self.assertFalse(self._dirty_bit())
        MyUser.objects.update(update_dirty_bit_to=True)
        self.assertTrue(self._dirty_bit())
        MyUser.objects.update(update_dirty_bit_to=False)
        self.assertFalse(self._dirty_bit())
        MyUser.objects.update()
        self.assertTrue(self._dirty_bit())
        MyUser.objects.update(update_dirty_bit_to=None)
        self.assertTrue(self._dirty_bit())

    def test_syncable_qs_update(self):
        self.assertTrue(self._dirty_bit())
        MyUser.objects.all().update(update_dirty_bit_to=False)
        self.assertFalse(self._dirty_bit())
        MyUser.objects.all().update(update_dirty_bit_to=None)
        self.assertFalse(self._dirty_bit())
        MyUser.objects.all().update(update_dirty_bit_to=True)
        self.assertTrue(self._dirty_bit())
        MyUser.objects.all().update(update_dirty_bit_to=False)
        self.assertFalse(self._dirty_bit())
        MyUser.objects.all().update()
        self.assertTrue(self._dirty_bit())
        MyUser.objects.all().update(update_dirty_bit_to=None)
        self.assertTrue(self._dirty_bit())

    def test_syncable_save(self):
        user = MyUser.objects.first()
        self.assertTrue(user._morango_dirty_bit)
        user.save(update_dirty_bit_to=False)
        self.assertFalse(self._dirty_bit())
        user.save(update_dirty_bit_to=None)
        self.assertFalse(self._dirty_bit())
        user.save(update_dirty_bit_to=True)
        self.assertTrue(self._dirty_bit())
        user.save(update_dirty_bit_to=False)
        self.assertFalse(self._dirty_bit())
        user.save()
        self.assertTrue(self._dirty_bit())
        user.save(update_dirty_bit_to=None)
        self.assertTrue(self._dirty_bit())
| 47.555556
| 80
| 0.742991
| 373
| 2,996
| 5.63807
| 0.107239
| 0.222539
| 0.179743
| 0.237756
| 0.816928
| 0.816928
| 0.816928
| 0.767475
| 0.767475
| 0.767475
| 0
| 0
| 0.151202
| 2,996
| 62
| 81
| 48.322581
| 0.826976
| 0
| 0
| 0.592593
| 0
| 0
| 0.001669
| 0
| 0
| 0
| 0
| 0
| 0.425926
| 1
| 0.111111
| false
| 0
| 0.074074
| 0
| 0.203704
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
12f269204afcc067e9c729460fea12dbbdcb4809
| 4,464
|
py
|
Python
|
HLTriggerOffline/Top/python/topDiLeptonHLTEventValidation_cfi.py
|
NTrevisani/cmssw
|
a212a27526f34eb9507cf8b875c93896e6544781
|
[
"Apache-2.0"
] | 3
|
2018-08-24T19:10:26.000Z
|
2019-02-19T11:45:32.000Z
|
HLTriggerOffline/Top/python/topDiLeptonHLTEventValidation_cfi.py
|
NTrevisani/cmssw
|
a212a27526f34eb9507cf8b875c93896e6544781
|
[
"Apache-2.0"
] | 7
|
2016-07-17T02:34:54.000Z
|
2019-08-13T07:58:37.000Z
|
HLTriggerOffline/Top/python/topDiLeptonHLTEventValidation_cfi.py
|
NTrevisani/cmssw
|
a212a27526f34eb9507cf8b875c93896e6544781
|
[
"Apache-2.0"
] | 5
|
2018-08-21T16:37:52.000Z
|
2020-01-09T13:33:17.000Z
|
import FWCore.ParameterSet.Config as cms
from DQMServices.Core.DQMEDAnalyzer import DQMEDAnalyzer

def _topDiLeptonValidation(subdir, nMinElectrons, nMinMuons, hltPaths):
    # Common configuration shared by the three TopDiLeptonHLTValidation
    # monitors; only the DQM directory, the required lepton multiplicities
    # and the list of HLT paths differ between channels.
    return DQMEDAnalyzer('TopDiLeptonHLTValidation',
        # Directory
        sDir = cms.untracked.string(subdir),
        # Electrons
        sElectrons = cms.untracked.string('gedGsfElectrons'),
        ptElectrons = cms.untracked.double(20.),
        etaElectrons = cms.untracked.double(2.5),
        isoElectrons = cms.untracked.double(0.15),
        minElectrons = cms.untracked.uint32(nMinElectrons),
        # Muons
        sMuons = cms.untracked.string('muons'),
        ptMuons = cms.untracked.double(20.),
        etaMuons = cms.untracked.double(2.4),
        isoMuons = cms.untracked.double(0.2),
        minMuons = cms.untracked.uint32(nMinMuons),
        # Jets
        sJets = cms.untracked.string('ak4PFJetsCHS'),
        ptJets = cms.untracked.double(30.),
        etaJets = cms.untracked.double(2.5),
        minJets = cms.untracked.uint32(2),
        # Trigger
        iTrigger = cms.untracked.InputTag("TriggerResults","","HLT"),
        ### Updating to HLT paths to be monitored by TOP PAG in 2017
        vsPaths = cms.untracked.vstring(hltPaths),
    )

# ttbar dimuon
DiMuonHLTValidation = _topDiLeptonValidation(
    'HLT/TopHLTValidation/Top/DiMuon/', 0, 2,
    ['HLT_Mu17_TrkIsoVVL_Mu8_TrkIsoVVL_DZ_Mass8_v',
     'HLT_Mu17_TrkIsoVVL_Mu8_TrkIsoVVL_DZ_Mass3p8_v'])

# ttbar dielec
DiElectronHLTValidation = _topDiLeptonValidation(
    'HLT/TopHLTValidation/Top/DiElectron/', 2, 0,
    ['HLT_Ele23_Ele12_CaloIdL_TrackIdL_IsoVL_v',
     'HLT_Ele23_Ele12_CaloIdL_TrackIdL_IsoVL_DZ_v'])

# ttbar elec-muon
ElecMuonHLTValidation = _topDiLeptonValidation(
    'HLT/TopHLTValidation/Top/ElecMuon/', 1, 1,
    ['HLT_Mu12_TrkIsoVVL_Ele23_CaloIdL_TrackIdL_IsoVL_DZ_v',
     'HLT_Mu23_TrkIsoVVL_Ele12_CaloIdL_TrackIdL_IsoVL_v',
     'HLT_Mu23_TrkIsoVVL_Ele12_CaloIdL_TrackIdL_IsoVL_DZ_v'])
| 50.727273
| 173
| 0.558692
| 415
| 4,464
| 5.889157
| 0.207229
| 0.250409
| 0.176759
| 0.069967
| 0.899345
| 0.890344
| 0.83838
| 0.83838
| 0.80401
| 0.80401
| 0
| 0.041244
| 0.337366
| 4,464
| 87
| 174
| 51.310345
| 0.78499
| 0.124104
| 0
| 0.619048
| 0
| 0
| 0.171133
| 0.132131
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.031746
| 0
| 0.031746
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
42676d99b6e9bb3450c180d20be5de98f8691ee4
| 117
|
py
|
Python
|
tests/parser/depgraph.4.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/depgraph.4.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/depgraph.4.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
# Parser round-trip fixture: a program using a #count aggregate must be
# reproduced unchanged by the parser.
input = """
p(1) :- #count{X:q(X)}=1.
q(X) :- p(X).
"""
# The expected output is byte-identical to the input program.
output = input
| 13
| 26
| 0.350427
| 24
| 117
| 1.708333
| 0.291667
| 0.195122
| 0.341463
| 0.390244
| 0.731707
| 0.731707
| 0.731707
| 0.731707
| 0.731707
| 0.731707
| 0
| 0.043011
| 0.205128
| 117
| 8
| 27
| 14.625
| 0.397849
| 0
| 0
| 0.75
| 0
| 0
| 0.725664
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
4280922fccf235c01e3a6776485a9f77a01aaf91
| 2,535
|
py
|
Python
|
utils/initialize.py
|
AnshulPundhir/HyperparametricMetaFusionBlock
|
8edd70982f84d6a9cf619b2c93ac6d278274ba7c
|
[
"MIT"
] | null | null | null |
utils/initialize.py
|
AnshulPundhir/HyperparametricMetaFusionBlock
|
8edd70982f84d6a9cf619b2c93ac6d278274ba7c
|
[
"MIT"
] | null | null | null |
utils/initialize.py
|
AnshulPundhir/HyperparametricMetaFusionBlock
|
8edd70982f84d6a9cf619b2c93ac6d278274ba7c
|
[
"MIT"
] | null | null | null |
#Filename: initialize.py
#Institute: IIT Roorkee
import torch.nn as nn
import numpy as np
def weights_init_kaimingUniform(module):
    """Kaiming-uniform init for *module* and all its submodules.

    Conv2d and Linear weights use kaiming_uniform_ (fan_in, relu) with zero
    biases; BatchNorm2d weights are drawn from U(0, 1) with zero biases.
    """
    for layer in module.modules():
        if isinstance(layer, nn.BatchNorm2d):
            nn.init.uniform_(layer.weight, a=0, b=1)
            nn.init.constant_(layer.bias, val=0.)
        elif isinstance(layer, (nn.Conv2d, nn.Linear)):
            # Conv2d and Linear share the same weight init; bias may be absent.
            nn.init.kaiming_uniform_(layer.weight, mode='fan_in', nonlinearity='relu')
            if layer.bias is not None:
                nn.init.constant_(layer.bias, 0)
def weights_init_kaimingNormal(module):
    """Kaiming-normal init for *module* and all its submodules.

    Conv2d and Linear weights use kaiming_normal_ (fan_in, relu) with zero
    biases; BatchNorm2d weights are drawn from N(0, 0.01) with zero biases.
    """
    for layer in module.modules():
        if isinstance(layer, nn.BatchNorm2d):
            nn.init.normal_(layer.weight, 0, 0.01)
            nn.init.constant_(layer.bias, val=0.)
        elif isinstance(layer, (nn.Conv2d, nn.Linear)):
            # Conv2d and Linear share the same weight init; bias may be absent.
            nn.init.kaiming_normal_(layer.weight, mode='fan_in', nonlinearity='relu')
            if layer.bias is not None:
                nn.init.constant_(layer.bias, 0)
def weights_init_xavierUniform(module):
    """Xavier-uniform init for *module* and all its submodules.

    Conv2d and Linear weights use xavier_uniform_ with gain sqrt(2) and zero
    biases; BatchNorm2d weights are drawn from U(0, 1) with zero biases.
    """
    for m in module.modules():
        if isinstance(m, nn.Conv2d):
            nn.init.xavier_uniform_(m.weight, gain = np.sqrt(2))
            if m.bias is not None:
                nn.init.constant_(m.bias, 0)
        elif isinstance(m, nn.BatchNorm2d):
            nn.init.uniform_(m.weight, a = 0, b = 1)
            nn.init.constant_(m.bias, val = 0.)
        elif isinstance(m, nn.Linear):
            # Bug fix: this branch previously called xavier_normal_, which
            # contradicts the function name and the Conv2d branch above.
            nn.init.xavier_uniform_(m.weight, gain = np.sqrt(2))
            if m.bias is not None:
                nn.init.constant_(m.bias, val = 0.)
def weights_init_xavierNormal(module):
    """Xavier-normal init for *module* and all its submodules.

    Conv2d and Linear weights use xavier_normal_ with gain sqrt(2) and zero
    biases; BatchNorm2d weights are drawn from N(0, 0.01) with zero biases.
    """
    for m in module.modules():
        if isinstance(m, nn.Conv2d):
            nn.init.xavier_normal_(m.weight, gain = np.sqrt(2))
            if m.bias is not None:
                nn.init.constant_(m.bias, 0)
        elif isinstance(m, nn.BatchNorm2d):
            nn.init.normal_(m.weight, 0, 0.01)
            nn.init.constant_(m.bias, val = 0.)
        elif isinstance(m, nn.Linear):
            # Bug fix: this branch previously called
            # nn.init.kaiming_normal_(m.weight, gain=np.sqrt(2)), which raises
            # TypeError (kaiming_normal_ has no `gain` parameter) for every
            # model containing a Linear layer, and contradicts the function name.
            nn.init.xavier_normal_(m.weight, gain = np.sqrt(2))
            if m.bias is not None:
                nn.init.constant_(m.bias, val = 0.)
| 34.256757
| 86
| 0.571598
| 361
| 2,535
| 3.891967
| 0.146814
| 0.102491
| 0.111032
| 0.128114
| 0.897509
| 0.897509
| 0.897509
| 0.897509
| 0.897509
| 0.897509
| 0
| 0.020305
| 0.300592
| 2,535
| 73
| 87
| 34.726027
| 0.772138
| 0.017751
| 0
| 0.851852
| 0
| 0
| 0.016077
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.074074
| false
| 0
| 0.037037
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
42a391e4bafd52ac14e8388042a1ad1bb042985e
| 248
|
py
|
Python
|
Python/met_brewer/__init__.py
|
vitusbenson/MetBrewer
|
082835323a60107b0d38b0901f003c6609cce84e
|
[
"CC0-1.0"
] | 570
|
2021-12-11T23:10:07.000Z
|
2022-03-31T17:51:47.000Z
|
Python/met_brewer/__init__.py
|
Donjae-Wong/MetBrewer
|
082835323a60107b0d38b0901f003c6609cce84e
|
[
"CC0-1.0"
] | 11
|
2021-12-13T14:28:42.000Z
|
2022-03-28T09:00:01.000Z
|
Python/met_brewer/__init__.py
|
Donjae-Wong/MetBrewer
|
082835323a60107b0d38b0901f003c6609cce84e
|
[
"CC0-1.0"
] | 57
|
2021-12-12T18:28:45.000Z
|
2022-03-29T19:27:47.000Z
|
from met_brewer.palettes import (
    MET_PALETTES, COLORBLIND_PALETTES_NAMES, COLORBLIND_PALETTES,
    met_brew, export, is_colorblind_friendly
)

# Declare the re-exported public API explicitly. The original file listed each
# name as a bare no-op expression statement (presumably to silence
# unused-import linters); `__all__` is the idiomatic way to mark re-exports
# and keeps `from met_brewer import *` behavior identical.
__all__ = [
    "MET_PALETTES",
    "COLORBLIND_PALETTES_NAMES",
    "COLORBLIND_PALETTES",
    "met_brew",
    "export",
    "is_colorblind_friendly",
]
| 17.714286
| 65
| 0.854839
| 31
| 248
| 6.354839
| 0.354839
| 0.365482
| 0.213198
| 0.294416
| 0.862944
| 0.862944
| 0.862944
| 0.862944
| 0.862944
| 0.862944
| 0
| 0
| 0.112903
| 248
| 13
| 66
| 19.076923
| 0.895455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.1
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
223bd9b48cf4bf82eba4ec94c3e1e436a8f9c07c
| 68,644
|
py
|
Python
|
benchmarks/SimResults/combinations_spec_ml/cmp_bwavesgccmcfleslie3d/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/combinations_spec_ml/cmp_bwavesgccmcfleslie3d/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/combinations_spec_ml/cmp_bwavesgccmcfleslie3d/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.062853,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.252056,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.335673,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.295198,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.511177,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.293174,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.09955,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.240329,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.94532,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0634158,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0107012,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.101067,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0791417,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.164483,
'Execution Unit/Register Files/Runtime Dynamic': 0.0898429,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.261438,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.661469,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 2.50829,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00178474,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00178474,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00155913,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00060609,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00113688,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00626549,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0169469,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0760809,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 4.8394,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.225952,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.258405,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 7.29637,
'Instruction Fetch Unit/Runtime Dynamic': 0.583651,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0910578,
'L2/Runtime Dynamic': 0.0149119,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.9597,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.32955,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0880819,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0880819,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 4.37734,
'Load Store Unit/Runtime Dynamic': 1.85203,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.217195,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.43439,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0770832,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0783381,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.300896,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0373754,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.592657,
'Memory Management Unit/Runtime Dynamic': 0.115713,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 22.8644,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.221243,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0177571,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.14974,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.388741,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 5.46334,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.025722,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.222892,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.136926,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.123069,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.198506,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.100199,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.421774,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.119763,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.29954,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0258682,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00516207,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0470388,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0381767,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.072907,
'Execution Unit/Register Files/Runtime Dynamic': 0.0433388,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.105529,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.270699,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.36408,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000937183,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000937183,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000842201,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000340203,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000548411,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00326498,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00805973,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0367002,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 2.33445,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.104961,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.124651,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 4.66626,
'Instruction Fetch Unit/Runtime Dynamic': 0.277637,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.041946,
'L2/Runtime Dynamic': 0.00699192,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.57227,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.652076,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0431954,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0431955,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.77625,
'Load Store Unit/Runtime Dynamic': 0.908297,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.106513,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.213025,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0378017,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0383785,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.145148,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0173641,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.366193,
'Memory Management Unit/Runtime Dynamic': 0.0557427,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 15.7397,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0680471,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00638066,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0617779,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.136206,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 2.74895,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0146946,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.214231,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.0762249,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.103621,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.167136,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0843647,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.355122,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.106825,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.1679,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0144005,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00434631,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.037058,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0321437,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0514585,
'Execution Unit/Register Files/Runtime Dynamic': 0.03649,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0817452,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.21952,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.23074,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000780825,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000780825,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000696157,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000278277,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000461746,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00271955,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00691271,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0309005,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 1.96554,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0891683,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.104952,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 4.27945,
'Instruction Fetch Unit/Runtime Dynamic': 0.234653,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0410605,
'L2/Runtime Dynamic': 0.0105044,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.34204,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.54846,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0357467,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0357467,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.51084,
'Load Store Unit/Runtime Dynamic': 0.760498,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0881454,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.176291,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0312831,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0318681,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.12221,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0147115,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.332058,
'Memory Management Unit/Runtime Dynamic': 0.0465796,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 14.9208,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0378811,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00513608,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0527002,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.0957174,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 2.37869,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0112371,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.211515,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.057366,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.070632,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.113927,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0575064,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.242065,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.0719872,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.06656,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0108377,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00296262,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0257653,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0219104,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.036603,
'Execution Unit/Register Files/Runtime Dynamic': 0.024873,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0570901,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.157404,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.04123,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00036335,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00036335,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000322085,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000127752,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000314745,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00136353,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00328341,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.021063,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 1.33979,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0509907,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.0715396,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 3.62333,
'Instruction Fetch Unit/Runtime Dynamic': 0.14824,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0372791,
'L2/Runtime Dynamic': 0.00906734,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.03791,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.399234,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0259074,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0259075,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.16025,
'Load Store Unit/Runtime Dynamic': 0.552909,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0638833,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.127767,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0226724,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.023226,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.0833033,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.00837784,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.278359,
'Memory Management Unit/Runtime Dynamic': 0.0316039,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 13.7552,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0285093,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00353367,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0361967,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.0682397,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 1.85129,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 6.695877235603369,
'Runtime Dynamic': 6.695877235603369,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.334595,
'Runtime Dynamic': 0.085356,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 67.6147,
'Peak Power': 100.727,
'Runtime Dynamic': 12.5276,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 67.2801,
'Total Cores/Runtime Dynamic': 12.4423,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.334595,
'Total L3s/Runtime Dynamic': 0.085356,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
| 75.102845
| 124
| 0.682245
| 8,082
| 68,644
| 5.788666
| 0.067681
| 0.123461
| 0.112859
| 0.093365
| 0.938569
| 0.930703
| 0.917322
| 0.886521
| 0.862303
| 0.841677
| 0
| 0.132518
| 0.224215
| 68,644
| 914
| 125
| 75.102845
| 0.746005
| 0
| 0
| 0.642232
| 0
| 0
| 0.657076
| 0.048073
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
58bab9c7bcdd00d0e604b33aace90c72691b1ce7
| 9,023
|
py
|
Python
|
tests/dhcpv6/options_validation/test_v6_subnet_options.py
|
shawnmullaney/forge
|
aaaef0a0645f73d24666aab6a400f3604e753aac
|
[
"0BSD"
] | null | null | null |
tests/dhcpv6/options_validation/test_v6_subnet_options.py
|
shawnmullaney/forge
|
aaaef0a0645f73d24666aab6a400f3604e753aac
|
[
"0BSD"
] | null | null | null |
tests/dhcpv6/options_validation/test_v6_subnet_options.py
|
shawnmullaney/forge
|
aaaef0a0645f73d24666aab6a400f3604e753aac
|
[
"0BSD"
] | null | null | null |
"""DHCPv6 options defined in subnet"""
# pylint: disable=invalid-name,line-too-long
import pytest
import srv_control
import misc
import srv_msg
import references
@pytest.mark.v6
@pytest.mark.dhcp6
@pytest.mark.options
@pytest.mark.subnet
def test_v6_options_subnet_preference():
    """Subnet-scoped preference option (code 7) overrides the global value.

    Configures preference=123 at subnet scope and checks that the server
    returns it in both the ADVERTISE and the REPLY message.
    """
    # Testing server ability to configure it with option
    # preference (code 7) with value 123 per subnet (to override global)
    # and ability to share that value with client via Advertise and Reply message.
    # Client                      Server
    # request option  SOLICIT -->
    # preference value 123        <-- ADVERTISE
    # request option  REQUEST -->
    # preference value 123        <-- REPLY
    # Pass Criteria:
    # REPLY/ADVERTISE MUST include option:
    #     Preference option with value 123
    misc.test_setup()
    srv_control.config_srv_subnet('3000::/64', '3000::1-3000::ff')
    # '0' selects the first (and only) configured subnet as the option scope.
    srv_control.config_srv('preference', '0', '123')
    srv_control.build_and_send_config_files('SSH', 'config-file')
    srv_control.start_srv('DHCP', 'started')

    # SOLICIT requesting option 7; expect ADVERTISE carrying value 123.
    misc.test_procedure()
    srv_msg.client_requests_option('7')
    srv_msg.client_does_include('Client', None, 'client-id')
    srv_msg.client_does_include('Client', None, 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
    srv_msg.response_check_include_option('Response', None, '7')
    srv_msg.response_check_option_content('Response', '7', None, 'value', '123')

    # REQUEST (echoing server-id and IA_NA); expect REPLY carrying value 123.
    misc.test_procedure()
    srv_msg.client_copy_option('server-id')
    srv_msg.client_copy_option('IA_NA')
    srv_msg.client_requests_option('7')
    srv_msg.client_does_include('Client', None, 'client-id')
    srv_msg.client_send_msg('REQUEST')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'REPLY')
    srv_msg.response_check_include_option('Response', None, '7')
    srv_msg.response_check_option_content('Response', '7', None, 'value', '123')

    references.references_check('v6.options,')
@pytest.mark.v6
@pytest.mark.dhcp6
@pytest.mark.options
@pytest.mark.subnet
@pytest.mark.rfc3646
def test_v6_options_subnet_dns_servers():
    """Subnet-scoped dns-servers option (code 23) is returned to the client.

    Configures two DNS server addresses at subnet scope and checks that
    both appear in the ADVERTISE and the REPLY message.
    """
    # Testing server ability to configure it with option
    # DNS servers (code 23) with addresses 2001:db8::1 per subnet (to override global)
    # and ability to share that value with client via Advertise and Reply message.
    # Client                      Server
    # request option  SOLICIT -->
    # dns-servers                 <-- ADVERTISE
    # request option  REQUEST -->
    # dns-servers                 <-- REPLY
    # Pass Criteria:
    # REPLY/ADVERTISE MUST include option:
    #     dns-servers option with addresses
    #     2001:db8::1 and 2001:db8::2
    misc.test_setup()
    srv_control.config_srv_subnet('3000::/64', '3000::1-3000::ff')
    # '0' selects the first configured subnet as the option scope.
    srv_control.config_srv('dns-servers', '0', '2001:db8::1,2001:db8::2')
    srv_control.build_and_send_config_files('SSH', 'config-file')
    srv_control.start_srv('DHCP', 'started')

    # SOLICIT requesting option 23; expect ADVERTISE with both addresses.
    misc.test_procedure()
    srv_msg.client_requests_option('23')
    srv_msg.client_does_include('Client', None, 'client-id')
    srv_msg.client_does_include('Client', None, 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
    srv_msg.response_check_include_option('Response', None, '23')
    srv_msg.response_check_option_content('Response',
                                          '23',
                                          None,
                                          'addresses',
                                          '2001:db8::1,2001:db8::2')

    # REQUEST (echoing server-id and IA_NA); expect REPLY with both addresses.
    misc.test_procedure()
    srv_msg.client_copy_option('server-id')
    srv_msg.client_copy_option('IA_NA')
    srv_msg.client_requests_option('23')
    srv_msg.client_does_include('Client', None, 'client-id')
    srv_msg.client_send_msg('REQUEST')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'REPLY')
    srv_msg.response_check_include_option('Response', None, '23')
    srv_msg.response_check_option_content('Response',
                                          '23',
                                          None,
                                          'addresses',
                                          '2001:db8::1,2001:db8::2')

    references.references_check('v6.options,')
@pytest.mark.v6
@pytest.mark.dhcp6
@pytest.mark.options
@pytest.mark.subnet
@pytest.mark.rfc3646
def test_v6_options_subnet_domains():
    """Subnet-scoped domain-search option (code 24) is returned to the client.

    Configures two search domains at subnet scope and checks that both
    appear (in FQDN form, with trailing dots) in ADVERTISE and REPLY.
    """
    # Testing server ability to configure it with option
    # domains (code 24) with domains domain1.example.com
    # and domain2.isc.org, per subnet (to override global)
    # and ability to share that value with client via Advertise and Reply message.
    # Client                      Server
    # request option  SOLICIT -->
    # domain-search               <-- ADVERTISE
    # request option  REQUEST -->
    # domain-search               <-- REPLY
    # Pass Criteria:
    # REPLY/ADVERTISE MUST include option:
    #     domain-search option with addresses
    #     domain1.example.com and domain2.isc.org
    misc.test_setup()
    srv_control.config_srv_subnet('3000::/64', '3000::1-3000::ff')
    # '0' selects the first configured subnet as the option scope.
    srv_control.config_srv('domain-search', '0', 'domain1.example.com,domain2.isc.org')
    srv_control.build_and_send_config_files('SSH', 'config-file')
    srv_control.start_srv('DHCP', 'started')

    # SOLICIT requesting option 24; expect ADVERTISE with both domains.
    misc.test_procedure()
    srv_msg.client_requests_option('24')
    srv_msg.client_does_include('Client', None, 'client-id')
    srv_msg.client_does_include('Client', None, 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
    srv_msg.response_check_include_option('Response', None, '24')
    # Note the trailing dots: the response carries fully-qualified names.
    srv_msg.response_check_option_content('Response',
                                          '24',
                                          None,
                                          'domains',
                                          'domain1.example.com.,domain2.isc.org.')

    # REQUEST (echoing server-id and IA_NA); expect REPLY with both domains.
    misc.test_procedure()
    srv_msg.client_copy_option('server-id')
    srv_msg.client_copy_option('IA_NA')
    srv_msg.client_requests_option('24')
    srv_msg.client_does_include('Client', None, 'client-id')
    srv_msg.client_send_msg('REQUEST')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'REPLY')
    srv_msg.response_check_include_option('Response', None, '24')
    srv_msg.response_check_option_content('Response',
                                          '24',
                                          None,
                                          'domains',
                                          'domain1.example.com.,domain2.isc.org.')

    references.references_check('v6.options,')
@pytest.mark.v6
@pytest.mark.dhcp6
@pytest.mark.options
@pytest.mark.subnet
@pytest.mark.rfc3646
def test_v6_options_subnet_override():
    """Subnet-scoped domain-search overrides the globally configured one.

    Configures domain-search globally (global.example.com) and at subnet
    scope (subnet.example.com); the responses must carry only the
    subnet-level value.
    """
    # Testing server ability to configure it with option
    # domains (code 24) with domains subnet.example.com per subnet
    # (to override global which is also configured with domain global.example.com)
    # and ability to share that value with client via Advertise and Reply message.
    # Client                      Server
    # request option  SOLICIT -->
    # domain-search               <-- ADVERTISE
    # request option  REQUEST -->
    # domain-search               <-- REPLY
    # Pass Criteria:
    # REPLY/ADVERTISE MUST include option:
    #     domain-search option with addresses
    #     subnet.example.com
    # REPLY/ADVERTISE MUST NOT include option:
    #     domain-search option with addresses
    #     global.example.com
    misc.test_setup()
    srv_control.config_srv_subnet('3000::/64', '3000::1-3000::ff')
    # Global value first, then the subnet-level value that should win.
    srv_control.config_srv_opt('domain-search', 'global.example.com')
    srv_control.config_srv('domain-search', '0', 'subnet.example.com')
    srv_control.build_and_send_config_files('SSH', 'config-file')
    srv_control.start_srv('DHCP', 'started')

    # SOLICIT requesting option 24; expect only the subnet-level domain.
    misc.test_procedure()
    srv_msg.client_requests_option('24')
    srv_msg.client_does_include('Client', None, 'client-id')
    srv_msg.client_does_include('Client', None, 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
    srv_msg.response_check_include_option('Response', None, '24')
    srv_msg.response_check_option_content('Response', '24', None, 'domains', 'subnet.example.com.')

    # REQUEST (echoing server-id and IA_NA); expect the same in the REPLY.
    misc.test_procedure()
    srv_msg.client_copy_option('server-id')
    srv_msg.client_copy_option('IA_NA')
    srv_msg.client_requests_option('24')
    srv_msg.client_does_include('Client', None, 'client-id')
    srv_msg.client_send_msg('REQUEST')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'REPLY')
    srv_msg.response_check_include_option('Response', None, '24')
    srv_msg.response_check_option_content('Response', '24', None, 'domains', 'subnet.example.com.')

    references.references_check('v6.options,')
| 38.725322
| 99
| 0.654328
| 1,148
| 9,023
| 4.879791
| 0.092334
| 0.065334
| 0.077115
| 0.054266
| 0.90075
| 0.877365
| 0.869154
| 0.841485
| 0.808997
| 0.808997
| 0
| 0.032475
| 0.225313
| 9,023
| 232
| 100
| 38.892241
| 0.768956
| 0.237504
| 0
| 0.903448
| 0
| 0
| 0.169892
| 0.026092
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027586
| true
| 0.055172
| 0.034483
| 0
| 0.062069
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
4523c25d5a8c9887e3a52eb58626e94292be05c9
| 47,926
|
py
|
Python
|
arista/studio/v1/services/gen_pb2_grpc.py
|
barryCrunch/cloudvision-python
|
bafb55a57743141ef419ce8b6f3adda31a18ca42
|
[
"Apache-2.0"
] | 8
|
2020-10-22T13:19:00.000Z
|
2021-12-16T02:16:47.000Z
|
arista/studio/v1/services/gen_pb2_grpc.py
|
barryCrunch/cloudvision-python
|
bafb55a57743141ef419ce8b6f3adda31a18ca42
|
[
"Apache-2.0"
] | 6
|
2020-12-16T11:31:03.000Z
|
2021-11-19T10:00:37.000Z
|
arista/studio/v1/services/gen_pb2_grpc.py
|
barryCrunch/cloudvision-python
|
bafb55a57743141ef419ce8b6f3adda31a18ca42
|
[
"Apache-2.0"
] | 7
|
2020-12-04T01:30:34.000Z
|
2021-11-11T21:40:12.000Z
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from arista.studio.v1.services import gen_pb2 as arista_dot_studio_dot_v1_dot_services_dot_gen__pb2
class AssignedTagsServiceStub(object):
    """Client stub for the arista.studio.v1.AssignedTagsService service.

    Generated code; the source .proto file carries no service documentation.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # One callable per RPC: GetOne is unary-unary; GetAll and
        # Subscribe stream responses from the server.
        self.GetOne = channel.unary_unary(
                '/arista.studio.v1.AssignedTagsService/GetOne',
                request_serializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsRequest.SerializeToString,
                response_deserializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsResponse.FromString,
                )
        self.GetAll = channel.unary_stream(
                '/arista.studio.v1.AssignedTagsService/GetAll',
                request_serializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsStreamRequest.SerializeToString,
                response_deserializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsStreamResponse.FromString,
                )
        self.Subscribe = channel.unary_stream(
                '/arista.studio.v1.AssignedTagsService/Subscribe',
                request_serializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsStreamRequest.SerializeToString,
                response_deserializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsStreamResponse.FromString,
                )
class AssignedTagsServiceServicer(object):
    """Server-side base class for arista.studio.v1.AssignedTagsService.

    Subclass and override the methods below; every default implementation
    reports UNIMPLEMENTED to the caller.
    """

    def GetOne(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetAll(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Subscribe(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_AssignedTagsServiceServicer_to_server(servicer, server):
    """Register *servicer*'s handlers on *server* for the
    arista.studio.v1.AssignedTagsService service."""
    rpc_method_handlers = {
            'GetOne': grpc.unary_unary_rpc_method_handler(
                    servicer.GetOne,
                    request_deserializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsRequest.FromString,
                    response_serializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsResponse.SerializeToString,
            ),
            'GetAll': grpc.unary_stream_rpc_method_handler(
                    servicer.GetAll,
                    request_deserializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsStreamRequest.FromString,
                    response_serializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsStreamResponse.SerializeToString,
            ),
            'Subscribe': grpc.unary_stream_rpc_method_handler(
                    servicer.Subscribe,
                    request_deserializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsStreamRequest.FromString,
                    response_serializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsStreamResponse.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'arista.studio.v1.AssignedTagsService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class AssignedTagsService(object):
    """Stub-free call helpers for arista.studio.v1.AssignedTagsService.

    Each static method issues the corresponding RPC directly through
    grpc.experimental, without building a channel/stub first.
    """

    @staticmethod
    def GetOne(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/arista.studio.v1.AssignedTagsService/GetOne',
            arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsRequest.SerializeToString,
            arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetAll(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_stream(request, target, '/arista.studio.v1.AssignedTagsService/GetAll',
            arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsStreamRequest.SerializeToString,
            arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsStreamResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def Subscribe(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_stream(request, target, '/arista.studio.v1.AssignedTagsService/Subscribe',
            arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsStreamRequest.SerializeToString,
            arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsStreamResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
class AssignedTagsConfigServiceStub(object):
    """Client stub for the arista.studio.v1.AssignedTagsConfigService service.

    Generated code; the source .proto file carries no service documentation.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # One callable per RPC: GetOne/Set/Delete are unary-unary;
        # GetAll and Subscribe stream responses from the server.
        self.GetOne = channel.unary_unary(
                '/arista.studio.v1.AssignedTagsConfigService/GetOne',
                request_serializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsConfigRequest.SerializeToString,
                response_deserializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsConfigResponse.FromString,
                )
        self.GetAll = channel.unary_stream(
                '/arista.studio.v1.AssignedTagsConfigService/GetAll',
                request_serializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsConfigStreamRequest.SerializeToString,
                response_deserializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsConfigStreamResponse.FromString,
                )
        self.Subscribe = channel.unary_stream(
                '/arista.studio.v1.AssignedTagsConfigService/Subscribe',
                request_serializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsConfigStreamRequest.SerializeToString,
                response_deserializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsConfigStreamResponse.FromString,
                )
        self.Set = channel.unary_unary(
                '/arista.studio.v1.AssignedTagsConfigService/Set',
                request_serializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsConfigSetRequest.SerializeToString,
                response_deserializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsConfigSetResponse.FromString,
                )
        self.Delete = channel.unary_unary(
                '/arista.studio.v1.AssignedTagsConfigService/Delete',
                request_serializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsConfigDeleteRequest.SerializeToString,
                response_deserializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsConfigDeleteResponse.FromString,
                )
class AssignedTagsConfigServiceServicer(object):
    """Server-side base class for arista.studio.v1.AssignedTagsConfigService.

    Subclass and override the methods below; every default implementation
    reports UNIMPLEMENTED to the caller.
    """

    def GetOne(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetAll(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Subscribe(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Set(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Delete(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_AssignedTagsConfigServiceServicer_to_server(servicer, server):
    """Register *servicer*'s handlers on *server* for the
    arista.studio.v1.AssignedTagsConfigService service."""
    rpc_method_handlers = {
            'GetOne': grpc.unary_unary_rpc_method_handler(
                    servicer.GetOne,
                    request_deserializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsConfigRequest.FromString,
                    response_serializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsConfigResponse.SerializeToString,
            ),
            'GetAll': grpc.unary_stream_rpc_method_handler(
                    servicer.GetAll,
                    request_deserializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsConfigStreamRequest.FromString,
                    response_serializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsConfigStreamResponse.SerializeToString,
            ),
            'Subscribe': grpc.unary_stream_rpc_method_handler(
                    servicer.Subscribe,
                    request_deserializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsConfigStreamRequest.FromString,
                    response_serializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsConfigStreamResponse.SerializeToString,
            ),
            'Set': grpc.unary_unary_rpc_method_handler(
                    servicer.Set,
                    request_deserializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsConfigSetRequest.FromString,
                    response_serializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsConfigSetResponse.SerializeToString,
            ),
            'Delete': grpc.unary_unary_rpc_method_handler(
                    servicer.Delete,
                    request_deserializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsConfigDeleteRequest.FromString,
                    response_serializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsConfigDeleteResponse.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'arista.studio.v1.AssignedTagsConfigService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class AssignedTagsConfigService(object):
    """Stub-free call helpers for arista.studio.v1.AssignedTagsConfigService.

    Each static method issues the corresponding RPC directly through
    grpc.experimental, without building a channel/stub first.
    """

    @staticmethod
    def GetOne(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/arista.studio.v1.AssignedTagsConfigService/GetOne',
            arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsConfigRequest.SerializeToString,
            arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsConfigResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetAll(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_stream(request, target, '/arista.studio.v1.AssignedTagsConfigService/GetAll',
            arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsConfigStreamRequest.SerializeToString,
            arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsConfigStreamResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def Subscribe(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_stream(request, target, '/arista.studio.v1.AssignedTagsConfigService/Subscribe',
            arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsConfigStreamRequest.SerializeToString,
            arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsConfigStreamResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def Set(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/arista.studio.v1.AssignedTagsConfigService/Set',
            arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsConfigSetRequest.SerializeToString,
            arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsConfigSetResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def Delete(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/arista.studio.v1.AssignedTagsConfigService/Delete',
            arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsConfigDeleteRequest.SerializeToString,
            arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.AssignedTagsConfigDeleteResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
class InputsServiceStub(object):
    """Client stub for the arista.studio.v1.InputsService service.

    Generated code; the source .proto file carries no service documentation.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # One callable per RPC: GetOne is unary-unary; GetAll and
        # Subscribe stream responses from the server.
        self.GetOne = channel.unary_unary(
                '/arista.studio.v1.InputsService/GetOne',
                request_serializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.InputsRequest.SerializeToString,
                response_deserializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.InputsResponse.FromString,
                )
        self.GetAll = channel.unary_stream(
                '/arista.studio.v1.InputsService/GetAll',
                request_serializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.InputsStreamRequest.SerializeToString,
                response_deserializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.InputsStreamResponse.FromString,
                )
        self.Subscribe = channel.unary_stream(
                '/arista.studio.v1.InputsService/Subscribe',
                request_serializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.InputsStreamRequest.SerializeToString,
                response_deserializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.InputsStreamResponse.FromString,
                )
class InputsServiceServicer(object):
    """Server-side base class for arista.studio.v1.InputsService.

    Subclass and override the methods below; every default implementation
    reports UNIMPLEMENTED to the caller.
    """

    def GetOne(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetAll(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Subscribe(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_InputsServiceServicer_to_server(servicer, server):
    """Register *servicer*'s handlers on *server* for the
    arista.studio.v1.InputsService service."""
    rpc_method_handlers = {
            'GetOne': grpc.unary_unary_rpc_method_handler(
                    servicer.GetOne,
                    request_deserializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.InputsRequest.FromString,
                    response_serializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.InputsResponse.SerializeToString,
            ),
            'GetAll': grpc.unary_stream_rpc_method_handler(
                    servicer.GetAll,
                    request_deserializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.InputsStreamRequest.FromString,
                    response_serializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.InputsStreamResponse.SerializeToString,
            ),
            'Subscribe': grpc.unary_stream_rpc_method_handler(
                    servicer.Subscribe,
                    request_deserializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.InputsStreamRequest.FromString,
                    response_serializer=arista_dot_studio_dot_v1_dot_services_dot_gen__pb2.InputsStreamResponse.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'arista.studio.v1.InputsService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class InputsService(object):
    """Single-call (channel-less) client for arista.studio.v1.InputsService.

    Part of gRPC's EXPERIMENTAL simple-stubs API.
    """

    @staticmethod
    def GetOne(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        pb2 = arista_dot_studio_dot_v1_dot_services_dot_gen__pb2
        return grpc.experimental.unary_unary(
            request,
            target,
            '/arista.studio.v1.InputsService/GetOne',
            request_serializer=pb2.InputsRequest.SerializeToString,
            response_deserializer=pb2.InputsResponse.FromString,
            options=options,
            channel_credentials=channel_credentials,
            insecure=insecure,
            call_credentials=call_credentials,
            compression=compression,
            wait_for_ready=wait_for_ready,
            timeout=timeout,
            metadata=metadata)

    @staticmethod
    def GetAll(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        pb2 = arista_dot_studio_dot_v1_dot_services_dot_gen__pb2
        return grpc.experimental.unary_stream(
            request,
            target,
            '/arista.studio.v1.InputsService/GetAll',
            request_serializer=pb2.InputsStreamRequest.SerializeToString,
            response_deserializer=pb2.InputsStreamResponse.FromString,
            options=options,
            channel_credentials=channel_credentials,
            insecure=insecure,
            call_credentials=call_credentials,
            compression=compression,
            wait_for_ready=wait_for_ready,
            timeout=timeout,
            metadata=metadata)

    @staticmethod
    def Subscribe(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        pb2 = arista_dot_studio_dot_v1_dot_services_dot_gen__pb2
        return grpc.experimental.unary_stream(
            request,
            target,
            '/arista.studio.v1.InputsService/Subscribe',
            request_serializer=pb2.InputsStreamRequest.SerializeToString,
            response_deserializer=pb2.InputsStreamResponse.FromString,
            options=options,
            channel_credentials=channel_credentials,
            insecure=insecure,
            call_credentials=call_credentials,
            compression=compression,
            wait_for_ready=wait_for_ready,
            timeout=timeout,
            metadata=metadata)
class InputsConfigServiceStub(object):
    """Client stub exposing arista.studio.v1.InputsConfigService RPCs as callables."""

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        pb2 = arista_dot_studio_dot_v1_dot_services_dot_gen__pb2
        # One multi-callable per RPC, bound to the supplied channel.
        self.GetOne = channel.unary_unary(
            '/arista.studio.v1.InputsConfigService/GetOne',
            request_serializer=pb2.InputsConfigRequest.SerializeToString,
            response_deserializer=pb2.InputsConfigResponse.FromString,
        )
        self.GetAll = channel.unary_stream(
            '/arista.studio.v1.InputsConfigService/GetAll',
            request_serializer=pb2.InputsConfigStreamRequest.SerializeToString,
            response_deserializer=pb2.InputsConfigStreamResponse.FromString,
        )
        self.Subscribe = channel.unary_stream(
            '/arista.studio.v1.InputsConfigService/Subscribe',
            request_serializer=pb2.InputsConfigStreamRequest.SerializeToString,
            response_deserializer=pb2.InputsConfigStreamResponse.FromString,
        )
        self.Set = channel.unary_unary(
            '/arista.studio.v1.InputsConfigService/Set',
            request_serializer=pb2.InputsConfigSetRequest.SerializeToString,
            response_deserializer=pb2.InputsConfigSetResponse.FromString,
        )
        self.Delete = channel.unary_unary(
            '/arista.studio.v1.InputsConfigService/Delete',
            request_serializer=pb2.InputsConfigDeleteRequest.SerializeToString,
            response_deserializer=pb2.InputsConfigDeleteResponse.FromString,
        )
class InputsConfigServiceServicer(object):
    """Server-side base class for arista.studio.v1.InputsConfigService.

    Every RPC aborts with UNIMPLEMENTED; concrete servicers override the
    methods they support.
    """

    def _abort_unimplemented(self, context):
        # Common stub behavior: report UNIMPLEMENTED on the RPC context,
        # then raise so the handler never returns a response.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetOne(self, request, context):
        """Unary-unary handler; override in a concrete servicer."""
        self._abort_unimplemented(context)

    def GetAll(self, request, context):
        """Unary-stream handler; override in a concrete servicer."""
        self._abort_unimplemented(context)

    def Subscribe(self, request, context):
        """Unary-stream handler; override in a concrete servicer."""
        self._abort_unimplemented(context)

    def Set(self, request, context):
        """Unary-unary handler; override in a concrete servicer."""
        self._abort_unimplemented(context)

    def Delete(self, request, context):
        """Unary-unary handler; override in a concrete servicer."""
        self._abort_unimplemented(context)
def add_InputsConfigServiceServicer_to_server(servicer, server):
    """Register ``servicer``'s InputsConfigService handlers on ``server``."""
    pb2 = arista_dot_studio_dot_v1_dot_services_dot_gen__pb2
    handlers = {
        'GetOne': grpc.unary_unary_rpc_method_handler(
            servicer.GetOne,
            request_deserializer=pb2.InputsConfigRequest.FromString,
            response_serializer=pb2.InputsConfigResponse.SerializeToString,
        ),
        'GetAll': grpc.unary_stream_rpc_method_handler(
            servicer.GetAll,
            request_deserializer=pb2.InputsConfigStreamRequest.FromString,
            response_serializer=pb2.InputsConfigStreamResponse.SerializeToString,
        ),
        'Subscribe': grpc.unary_stream_rpc_method_handler(
            servicer.Subscribe,
            request_deserializer=pb2.InputsConfigStreamRequest.FromString,
            response_serializer=pb2.InputsConfigStreamResponse.SerializeToString,
        ),
        'Set': grpc.unary_unary_rpc_method_handler(
            servicer.Set,
            request_deserializer=pb2.InputsConfigSetRequest.FromString,
            response_serializer=pb2.InputsConfigSetResponse.SerializeToString,
        ),
        'Delete': grpc.unary_unary_rpc_method_handler(
            servicer.Delete,
            request_deserializer=pb2.InputsConfigDeleteRequest.FromString,
            response_serializer=pb2.InputsConfigDeleteResponse.SerializeToString,
        ),
    }
    server.add_generic_rpc_handlers((
        grpc.method_handlers_generic_handler(
            'arista.studio.v1.InputsConfigService', handlers),
    ))
# This class is part of an EXPERIMENTAL API.
class InputsConfigService(object):
    """Single-call (channel-less) client for arista.studio.v1.InputsConfigService.

    Part of gRPC's EXPERIMENTAL simple-stubs API.
    """

    @staticmethod
    def GetOne(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        pb2 = arista_dot_studio_dot_v1_dot_services_dot_gen__pb2
        return grpc.experimental.unary_unary(
            request,
            target,
            '/arista.studio.v1.InputsConfigService/GetOne',
            request_serializer=pb2.InputsConfigRequest.SerializeToString,
            response_deserializer=pb2.InputsConfigResponse.FromString,
            options=options,
            channel_credentials=channel_credentials,
            insecure=insecure,
            call_credentials=call_credentials,
            compression=compression,
            wait_for_ready=wait_for_ready,
            timeout=timeout,
            metadata=metadata)

    @staticmethod
    def GetAll(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        pb2 = arista_dot_studio_dot_v1_dot_services_dot_gen__pb2
        return grpc.experimental.unary_stream(
            request,
            target,
            '/arista.studio.v1.InputsConfigService/GetAll',
            request_serializer=pb2.InputsConfigStreamRequest.SerializeToString,
            response_deserializer=pb2.InputsConfigStreamResponse.FromString,
            options=options,
            channel_credentials=channel_credentials,
            insecure=insecure,
            call_credentials=call_credentials,
            compression=compression,
            wait_for_ready=wait_for_ready,
            timeout=timeout,
            metadata=metadata)

    @staticmethod
    def Subscribe(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        pb2 = arista_dot_studio_dot_v1_dot_services_dot_gen__pb2
        return grpc.experimental.unary_stream(
            request,
            target,
            '/arista.studio.v1.InputsConfigService/Subscribe',
            request_serializer=pb2.InputsConfigStreamRequest.SerializeToString,
            response_deserializer=pb2.InputsConfigStreamResponse.FromString,
            options=options,
            channel_credentials=channel_credentials,
            insecure=insecure,
            call_credentials=call_credentials,
            compression=compression,
            wait_for_ready=wait_for_ready,
            timeout=timeout,
            metadata=metadata)

    @staticmethod
    def Set(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        pb2 = arista_dot_studio_dot_v1_dot_services_dot_gen__pb2
        return grpc.experimental.unary_unary(
            request,
            target,
            '/arista.studio.v1.InputsConfigService/Set',
            request_serializer=pb2.InputsConfigSetRequest.SerializeToString,
            response_deserializer=pb2.InputsConfigSetResponse.FromString,
            options=options,
            channel_credentials=channel_credentials,
            insecure=insecure,
            call_credentials=call_credentials,
            compression=compression,
            wait_for_ready=wait_for_ready,
            timeout=timeout,
            metadata=metadata)

    @staticmethod
    def Delete(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        pb2 = arista_dot_studio_dot_v1_dot_services_dot_gen__pb2
        return grpc.experimental.unary_unary(
            request,
            target,
            '/arista.studio.v1.InputsConfigService/Delete',
            request_serializer=pb2.InputsConfigDeleteRequest.SerializeToString,
            response_deserializer=pb2.InputsConfigDeleteResponse.FromString,
            options=options,
            channel_credentials=channel_credentials,
            insecure=insecure,
            call_credentials=call_credentials,
            compression=compression,
            wait_for_ready=wait_for_ready,
            timeout=timeout,
            metadata=metadata)
class StudioServiceStub(object):
    """Client stub exposing arista.studio.v1.StudioService RPCs as callables."""

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        pb2 = arista_dot_studio_dot_v1_dot_services_dot_gen__pb2
        # One multi-callable per RPC, bound to the supplied channel.
        self.GetOne = channel.unary_unary(
            '/arista.studio.v1.StudioService/GetOne',
            request_serializer=pb2.StudioRequest.SerializeToString,
            response_deserializer=pb2.StudioResponse.FromString,
        )
        self.GetAll = channel.unary_stream(
            '/arista.studio.v1.StudioService/GetAll',
            request_serializer=pb2.StudioStreamRequest.SerializeToString,
            response_deserializer=pb2.StudioStreamResponse.FromString,
        )
        self.Subscribe = channel.unary_stream(
            '/arista.studio.v1.StudioService/Subscribe',
            request_serializer=pb2.StudioStreamRequest.SerializeToString,
            response_deserializer=pb2.StudioStreamResponse.FromString,
        )
class StudioServiceServicer(object):
    """Server-side base class for arista.studio.v1.StudioService.

    Every RPC aborts with UNIMPLEMENTED; concrete servicers override the
    methods they support.
    """

    def _abort_unimplemented(self, context):
        # Common stub behavior: report UNIMPLEMENTED on the RPC context,
        # then raise so the handler never returns a response.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetOne(self, request, context):
        """Unary-unary handler; override in a concrete servicer."""
        self._abort_unimplemented(context)

    def GetAll(self, request, context):
        """Unary-stream handler; override in a concrete servicer."""
        self._abort_unimplemented(context)

    def Subscribe(self, request, context):
        """Unary-stream handler; override in a concrete servicer."""
        self._abort_unimplemented(context)
def add_StudioServiceServicer_to_server(servicer, server):
    """Register ``servicer``'s StudioService handlers on ``server``."""
    pb2 = arista_dot_studio_dot_v1_dot_services_dot_gen__pb2
    handlers = {
        'GetOne': grpc.unary_unary_rpc_method_handler(
            servicer.GetOne,
            request_deserializer=pb2.StudioRequest.FromString,
            response_serializer=pb2.StudioResponse.SerializeToString,
        ),
        'GetAll': grpc.unary_stream_rpc_method_handler(
            servicer.GetAll,
            request_deserializer=pb2.StudioStreamRequest.FromString,
            response_serializer=pb2.StudioStreamResponse.SerializeToString,
        ),
        'Subscribe': grpc.unary_stream_rpc_method_handler(
            servicer.Subscribe,
            request_deserializer=pb2.StudioStreamRequest.FromString,
            response_serializer=pb2.StudioStreamResponse.SerializeToString,
        ),
    }
    server.add_generic_rpc_handlers((
        grpc.method_handlers_generic_handler(
            'arista.studio.v1.StudioService', handlers),
    ))
# This class is part of an EXPERIMENTAL API.
class StudioService(object):
    """Single-call (channel-less) client for arista.studio.v1.StudioService.

    Part of gRPC's EXPERIMENTAL simple-stubs API.
    """

    @staticmethod
    def GetOne(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        pb2 = arista_dot_studio_dot_v1_dot_services_dot_gen__pb2
        return grpc.experimental.unary_unary(
            request,
            target,
            '/arista.studio.v1.StudioService/GetOne',
            request_serializer=pb2.StudioRequest.SerializeToString,
            response_deserializer=pb2.StudioResponse.FromString,
            options=options,
            channel_credentials=channel_credentials,
            insecure=insecure,
            call_credentials=call_credentials,
            compression=compression,
            wait_for_ready=wait_for_ready,
            timeout=timeout,
            metadata=metadata)

    @staticmethod
    def GetAll(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        pb2 = arista_dot_studio_dot_v1_dot_services_dot_gen__pb2
        return grpc.experimental.unary_stream(
            request,
            target,
            '/arista.studio.v1.StudioService/GetAll',
            request_serializer=pb2.StudioStreamRequest.SerializeToString,
            response_deserializer=pb2.StudioStreamResponse.FromString,
            options=options,
            channel_credentials=channel_credentials,
            insecure=insecure,
            call_credentials=call_credentials,
            compression=compression,
            wait_for_ready=wait_for_ready,
            timeout=timeout,
            metadata=metadata)

    @staticmethod
    def Subscribe(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        pb2 = arista_dot_studio_dot_v1_dot_services_dot_gen__pb2
        return grpc.experimental.unary_stream(
            request,
            target,
            '/arista.studio.v1.StudioService/Subscribe',
            request_serializer=pb2.StudioStreamRequest.SerializeToString,
            response_deserializer=pb2.StudioStreamResponse.FromString,
            options=options,
            channel_credentials=channel_credentials,
            insecure=insecure,
            call_credentials=call_credentials,
            compression=compression,
            wait_for_ready=wait_for_ready,
            timeout=timeout,
            metadata=metadata)
class StudioConfigServiceStub(object):
    """Client stub exposing arista.studio.v1.StudioConfigService RPCs as callables."""

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        pb2 = arista_dot_studio_dot_v1_dot_services_dot_gen__pb2
        # One multi-callable per RPC, bound to the supplied channel.
        self.GetOne = channel.unary_unary(
            '/arista.studio.v1.StudioConfigService/GetOne',
            request_serializer=pb2.StudioConfigRequest.SerializeToString,
            response_deserializer=pb2.StudioConfigResponse.FromString,
        )
        self.GetAll = channel.unary_stream(
            '/arista.studio.v1.StudioConfigService/GetAll',
            request_serializer=pb2.StudioConfigStreamRequest.SerializeToString,
            response_deserializer=pb2.StudioConfigStreamResponse.FromString,
        )
        self.Subscribe = channel.unary_stream(
            '/arista.studio.v1.StudioConfigService/Subscribe',
            request_serializer=pb2.StudioConfigStreamRequest.SerializeToString,
            response_deserializer=pb2.StudioConfigStreamResponse.FromString,
        )
        self.Set = channel.unary_unary(
            '/arista.studio.v1.StudioConfigService/Set',
            request_serializer=pb2.StudioConfigSetRequest.SerializeToString,
            response_deserializer=pb2.StudioConfigSetResponse.FromString,
        )
        self.Delete = channel.unary_unary(
            '/arista.studio.v1.StudioConfigService/Delete',
            request_serializer=pb2.StudioConfigDeleteRequest.SerializeToString,
            response_deserializer=pb2.StudioConfigDeleteResponse.FromString,
        )
class StudioConfigServiceServicer(object):
    """Server-side base class for arista.studio.v1.StudioConfigService.

    Every RPC aborts with UNIMPLEMENTED; concrete servicers override the
    methods they support.
    """

    def _abort_unimplemented(self, context):
        # Common stub behavior: report UNIMPLEMENTED on the RPC context,
        # then raise so the handler never returns a response.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetOne(self, request, context):
        """Unary-unary handler; override in a concrete servicer."""
        self._abort_unimplemented(context)

    def GetAll(self, request, context):
        """Unary-stream handler; override in a concrete servicer."""
        self._abort_unimplemented(context)

    def Subscribe(self, request, context):
        """Unary-stream handler; override in a concrete servicer."""
        self._abort_unimplemented(context)

    def Set(self, request, context):
        """Unary-unary handler; override in a concrete servicer."""
        self._abort_unimplemented(context)

    def Delete(self, request, context):
        """Unary-unary handler; override in a concrete servicer."""
        self._abort_unimplemented(context)
def add_StudioConfigServiceServicer_to_server(servicer, server):
    """Register ``servicer``'s StudioConfigService handlers on ``server``."""
    pb2 = arista_dot_studio_dot_v1_dot_services_dot_gen__pb2
    handlers = {
        'GetOne': grpc.unary_unary_rpc_method_handler(
            servicer.GetOne,
            request_deserializer=pb2.StudioConfigRequest.FromString,
            response_serializer=pb2.StudioConfigResponse.SerializeToString,
        ),
        'GetAll': grpc.unary_stream_rpc_method_handler(
            servicer.GetAll,
            request_deserializer=pb2.StudioConfigStreamRequest.FromString,
            response_serializer=pb2.StudioConfigStreamResponse.SerializeToString,
        ),
        'Subscribe': grpc.unary_stream_rpc_method_handler(
            servicer.Subscribe,
            request_deserializer=pb2.StudioConfigStreamRequest.FromString,
            response_serializer=pb2.StudioConfigStreamResponse.SerializeToString,
        ),
        'Set': grpc.unary_unary_rpc_method_handler(
            servicer.Set,
            request_deserializer=pb2.StudioConfigSetRequest.FromString,
            response_serializer=pb2.StudioConfigSetResponse.SerializeToString,
        ),
        'Delete': grpc.unary_unary_rpc_method_handler(
            servicer.Delete,
            request_deserializer=pb2.StudioConfigDeleteRequest.FromString,
            response_serializer=pb2.StudioConfigDeleteResponse.SerializeToString,
        ),
    }
    server.add_generic_rpc_handlers((
        grpc.method_handlers_generic_handler(
            'arista.studio.v1.StudioConfigService', handlers),
    ))
# This class is part of an EXPERIMENTAL API.
class StudioConfigService(object):
    """Single-call (channel-less) client for arista.studio.v1.StudioConfigService.

    Part of gRPC's EXPERIMENTAL simple-stubs API.
    """

    @staticmethod
    def GetOne(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        pb2 = arista_dot_studio_dot_v1_dot_services_dot_gen__pb2
        return grpc.experimental.unary_unary(
            request,
            target,
            '/arista.studio.v1.StudioConfigService/GetOne',
            request_serializer=pb2.StudioConfigRequest.SerializeToString,
            response_deserializer=pb2.StudioConfigResponse.FromString,
            options=options,
            channel_credentials=channel_credentials,
            insecure=insecure,
            call_credentials=call_credentials,
            compression=compression,
            wait_for_ready=wait_for_ready,
            timeout=timeout,
            metadata=metadata)

    @staticmethod
    def GetAll(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        pb2 = arista_dot_studio_dot_v1_dot_services_dot_gen__pb2
        return grpc.experimental.unary_stream(
            request,
            target,
            '/arista.studio.v1.StudioConfigService/GetAll',
            request_serializer=pb2.StudioConfigStreamRequest.SerializeToString,
            response_deserializer=pb2.StudioConfigStreamResponse.FromString,
            options=options,
            channel_credentials=channel_credentials,
            insecure=insecure,
            call_credentials=call_credentials,
            compression=compression,
            wait_for_ready=wait_for_ready,
            timeout=timeout,
            metadata=metadata)

    @staticmethod
    def Subscribe(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        pb2 = arista_dot_studio_dot_v1_dot_services_dot_gen__pb2
        return grpc.experimental.unary_stream(
            request,
            target,
            '/arista.studio.v1.StudioConfigService/Subscribe',
            request_serializer=pb2.StudioConfigStreamRequest.SerializeToString,
            response_deserializer=pb2.StudioConfigStreamResponse.FromString,
            options=options,
            channel_credentials=channel_credentials,
            insecure=insecure,
            call_credentials=call_credentials,
            compression=compression,
            wait_for_ready=wait_for_ready,
            timeout=timeout,
            metadata=metadata)

    @staticmethod
    def Set(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        pb2 = arista_dot_studio_dot_v1_dot_services_dot_gen__pb2
        return grpc.experimental.unary_unary(
            request,
            target,
            '/arista.studio.v1.StudioConfigService/Set',
            request_serializer=pb2.StudioConfigSetRequest.SerializeToString,
            response_deserializer=pb2.StudioConfigSetResponse.FromString,
            options=options,
            channel_credentials=channel_credentials,
            insecure=insecure,
            call_credentials=call_credentials,
            compression=compression,
            wait_for_ready=wait_for_ready,
            timeout=timeout,
            metadata=metadata)

    @staticmethod
    def Delete(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        pb2 = arista_dot_studio_dot_v1_dot_services_dot_gen__pb2
        return grpc.experimental.unary_unary(
            request,
            target,
            '/arista.studio.v1.StudioConfigService/Delete',
            request_serializer=pb2.StudioConfigDeleteRequest.SerializeToString,
            response_deserializer=pb2.StudioConfigDeleteResponse.FromString,
            options=options,
            channel_credentials=channel_credentials,
            insecure=insecure,
            call_credentials=call_credentials,
            compression=compression,
            wait_for_ready=wait_for_ready,
            timeout=timeout,
            metadata=metadata)
| 49.612836
| 142
| 0.700392
| 4,739
| 47,926
| 6.651614
| 0.031441
| 0.02779
| 0.068999
| 0.082799
| 0.972717
| 0.97129
| 0.969862
| 0.957014
| 0.957014
| 0.933697
| 0
| 0.009395
| 0.231586
| 47,926
| 965
| 143
| 49.664249
| 0.846553
| 0.064349
| 0
| 0.767263
| 1
| 0
| 0.080671
| 0.052385
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076726
| false
| 0
| 0.002558
| 0.030691
| 0.132992
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
18b97398dbc4791478a20ade427edd05785ca1ef
| 40,579
|
py
|
Python
|
src/oci/vulnerability_scanning/vulnerability_scanning_client_composite_operations.py
|
LaudateCorpus1/oci-python-sdk
|
b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
src/oci/vulnerability_scanning/vulnerability_scanning_client_composite_operations.py
|
LaudateCorpus1/oci-python-sdk
|
b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
src/oci/vulnerability_scanning/vulnerability_scanning_client_composite_operations.py
|
LaudateCorpus1/oci-python-sdk
|
b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
# coding: utf-8
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
import oci # noqa: F401
from oci.util import WAIT_RESOURCE_NOT_FOUND # noqa: F401
class VulnerabilityScanningClientCompositeOperations(object):
"""
This class provides a wrapper around :py:class:`~oci.vulnerability_scanning.VulnerabilityScanningClient` and offers convenience methods
for operations that would otherwise need to be chained together. For example, instead of performing an action
on a resource (e.g. launching an instance, creating a load balancer) and then using a waiter to wait for the resource
to enter a given state, you can call a single method in this class to accomplish the same functionality
"""
    def __init__(self, client, **kwargs):
        """
        Creates a new VulnerabilityScanningClientCompositeOperations object

        :param VulnerabilityScanningClient client:
            The service client which will be wrapped by this object
        """
        # All composite operations delegate to this wrapped service client.
        # **kwargs is accepted for forward compatibility but is not read here.
        self.client = client
def create_container_scan_recipe_and_wait_for_state(self, create_container_scan_recipe_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.create_container_scan_recipe` and waits for the :py:class:`~oci.vulnerability_scanning.models.WorkRequest`
to enter the given state(s).
:param oci.vulnerability_scanning.models.CreateContainerScanRecipeDetails create_container_scan_recipe_details: (required)
Details for the new ContainerScanRecipe
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.vulnerability_scanning.models.WorkRequest.status`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.create_container_scan_recipe`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_interval_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.create_container_scan_recipe(create_container_scan_recipe_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.headers['opc-work-request-id']
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_work_request(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def create_container_scan_target_and_wait_for_state(self, create_container_scan_target_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.create_container_scan_target` and waits for the :py:class:`~oci.vulnerability_scanning.models.WorkRequest`
to enter the given state(s).
:param oci.vulnerability_scanning.models.CreateContainerScanTargetDetails create_container_scan_target_details: (required)
Details for the new ContainerScanTarget
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.vulnerability_scanning.models.WorkRequest.status`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.create_container_scan_target`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_interval_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.create_container_scan_target(create_container_scan_target_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.headers['opc-work-request-id']
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_work_request(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def create_host_scan_recipe_and_wait_for_state(self, create_host_scan_recipe_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.create_host_scan_recipe` and waits for the :py:class:`~oci.vulnerability_scanning.models.WorkRequest`
to enter the given state(s).
:param oci.vulnerability_scanning.models.CreateHostScanRecipeDetails create_host_scan_recipe_details: (required)
Details for the new HostScanRecipe
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.vulnerability_scanning.models.WorkRequest.status`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.create_host_scan_recipe`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_interval_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.create_host_scan_recipe(create_host_scan_recipe_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.headers['opc-work-request-id']
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_work_request(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def create_host_scan_target_and_wait_for_state(self, create_host_scan_target_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.create_host_scan_target` and waits for the :py:class:`~oci.vulnerability_scanning.models.WorkRequest`
to enter the given state(s).
:param oci.vulnerability_scanning.models.CreateHostScanTargetDetails create_host_scan_target_details: (required)
Details for the new HostScanTarget
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.vulnerability_scanning.models.WorkRequest.status`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.create_host_scan_target`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_interval_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.create_host_scan_target(create_host_scan_target_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.headers['opc-work-request-id']
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_work_request(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def delete_container_scan_recipe_and_wait_for_state(self, container_scan_recipe_id, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.delete_container_scan_recipe` and waits for the :py:class:`~oci.vulnerability_scanning.models.WorkRequest`
    to enter the given state(s).

    :param str container_scan_recipe_id: (required)
        unique ContainerScanRecipe identifier

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for :py:attr:`~oci.vulnerability_scanning.models.WorkRequest.status`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.delete_container_scan_recipe`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
        as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
    """
    # None sentinels instead of mutable default arguments; behavior unchanged.
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}

    operation_result = None
    try:
        operation_result = self.client.delete_container_scan_recipe(container_scan_recipe_id, **operation_kwargs)
    except oci.exceptions.ServiceError as e:
        if e.status == 404:
            # Resource already gone: treat the delete as successfully completed.
            return WAIT_RESOURCE_NOT_FOUND
        raise  # bare raise preserves the original traceback

    if not wait_for_states:
        return operation_result

    lowered_wait_for_states = [w.lower() for w in wait_for_states]
    # The asynchronous work request to poll is identified in the response headers.
    wait_for_resource_id = operation_result.headers['opc-work-request-id']
    try:
        return oci.wait_until(
            self.client,
            self.client.get_work_request(wait_for_resource_id),
            evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
            **waiter_kwargs
        )
    except Exception as e:
        # Surface the already-completed delete result alongside the waiter failure.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def delete_container_scan_result_and_wait_for_state(self, container_scan_result_id, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.delete_container_scan_result` and waits for the :py:class:`~oci.vulnerability_scanning.models.WorkRequest`
    to enter the given state(s).

    :param str container_scan_result_id: (required)
        unique container scan identifier

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for :py:attr:`~oci.vulnerability_scanning.models.WorkRequest.status`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.delete_container_scan_result`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
        as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
    """
    # None sentinels instead of mutable default arguments; behavior unchanged.
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}

    operation_result = None
    try:
        operation_result = self.client.delete_container_scan_result(container_scan_result_id, **operation_kwargs)
    except oci.exceptions.ServiceError as e:
        if e.status == 404:
            # Resource already gone: treat the delete as successfully completed.
            return WAIT_RESOURCE_NOT_FOUND
        raise  # bare raise preserves the original traceback

    if not wait_for_states:
        return operation_result

    lowered_wait_for_states = [w.lower() for w in wait_for_states]
    # The asynchronous work request to poll is identified in the response headers.
    wait_for_resource_id = operation_result.headers['opc-work-request-id']
    try:
        return oci.wait_until(
            self.client,
            self.client.get_work_request(wait_for_resource_id),
            evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
            **waiter_kwargs
        )
    except Exception as e:
        # Surface the already-completed delete result alongside the waiter failure.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def delete_container_scan_target_and_wait_for_state(self, container_scan_target_id, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.delete_container_scan_target` and waits for the :py:class:`~oci.vulnerability_scanning.models.WorkRequest`
    to enter the given state(s).

    :param str container_scan_target_id: (required)
        unique ContainerScanTarget identifier

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for :py:attr:`~oci.vulnerability_scanning.models.WorkRequest.status`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.delete_container_scan_target`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
        as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
    """
    # None sentinels instead of mutable default arguments; behavior unchanged.
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}

    operation_result = None
    try:
        operation_result = self.client.delete_container_scan_target(container_scan_target_id, **operation_kwargs)
    except oci.exceptions.ServiceError as e:
        if e.status == 404:
            # Resource already gone: treat the delete as successfully completed.
            return WAIT_RESOURCE_NOT_FOUND
        raise  # bare raise preserves the original traceback

    if not wait_for_states:
        return operation_result

    lowered_wait_for_states = [w.lower() for w in wait_for_states]
    # The asynchronous work request to poll is identified in the response headers.
    wait_for_resource_id = operation_result.headers['opc-work-request-id']
    try:
        return oci.wait_until(
            self.client,
            self.client.get_work_request(wait_for_resource_id),
            evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
            **waiter_kwargs
        )
    except Exception as e:
        # Surface the already-completed delete result alongside the waiter failure.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def delete_host_agent_scan_result_and_wait_for_state(self, host_agent_scan_result_id, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.delete_host_agent_scan_result` and waits for the :py:class:`~oci.vulnerability_scanning.models.WorkRequest`
    to enter the given state(s).

    :param str host_agent_scan_result_id: (required)
        unique host agent scan identifier

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for :py:attr:`~oci.vulnerability_scanning.models.WorkRequest.status`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.delete_host_agent_scan_result`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
        as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
    """
    # None sentinels instead of mutable default arguments; behavior unchanged.
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}

    operation_result = None
    try:
        operation_result = self.client.delete_host_agent_scan_result(host_agent_scan_result_id, **operation_kwargs)
    except oci.exceptions.ServiceError as e:
        if e.status == 404:
            # Resource already gone: treat the delete as successfully completed.
            return WAIT_RESOURCE_NOT_FOUND
        raise  # bare raise preserves the original traceback

    if not wait_for_states:
        return operation_result

    lowered_wait_for_states = [w.lower() for w in wait_for_states]
    # The asynchronous work request to poll is identified in the response headers.
    wait_for_resource_id = operation_result.headers['opc-work-request-id']
    try:
        return oci.wait_until(
            self.client,
            self.client.get_work_request(wait_for_resource_id),
            evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
            **waiter_kwargs
        )
    except Exception as e:
        # Surface the already-completed delete result alongside the waiter failure.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def delete_host_cis_benchmark_scan_result_and_wait_for_state(self, host_cis_benchmark_scan_result_id, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.delete_host_cis_benchmark_scan_result` and waits for the :py:class:`~oci.vulnerability_scanning.models.WorkRequest`
    to enter the given state(s).

    :param str host_cis_benchmark_scan_result_id: (required)
        unique host Cis benchmark scan identifier

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for :py:attr:`~oci.vulnerability_scanning.models.WorkRequest.status`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.delete_host_cis_benchmark_scan_result`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
        as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
    """
    # None sentinels instead of mutable default arguments; behavior unchanged.
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}

    operation_result = None
    try:
        operation_result = self.client.delete_host_cis_benchmark_scan_result(host_cis_benchmark_scan_result_id, **operation_kwargs)
    except oci.exceptions.ServiceError as e:
        if e.status == 404:
            # Resource already gone: treat the delete as successfully completed.
            return WAIT_RESOURCE_NOT_FOUND
        raise  # bare raise preserves the original traceback

    if not wait_for_states:
        return operation_result

    lowered_wait_for_states = [w.lower() for w in wait_for_states]
    # The asynchronous work request to poll is identified in the response headers.
    wait_for_resource_id = operation_result.headers['opc-work-request-id']
    try:
        return oci.wait_until(
            self.client,
            self.client.get_work_request(wait_for_resource_id),
            evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
            **waiter_kwargs
        )
    except Exception as e:
        # Surface the already-completed delete result alongside the waiter failure.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def delete_host_endpoint_protection_scan_result_and_wait_for_state(self, host_endpoint_protection_scan_result_id, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.delete_host_endpoint_protection_scan_result` and waits for the :py:class:`~oci.vulnerability_scanning.models.WorkRequest`
    to enter the given state(s).

    :param str host_endpoint_protection_scan_result_id: (required)
        unique host endpoint protection scan identifier

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for :py:attr:`~oci.vulnerability_scanning.models.WorkRequest.status`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.delete_host_endpoint_protection_scan_result`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
        as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
    """
    # None sentinels instead of mutable default arguments; behavior unchanged.
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}

    operation_result = None
    try:
        operation_result = self.client.delete_host_endpoint_protection_scan_result(host_endpoint_protection_scan_result_id, **operation_kwargs)
    except oci.exceptions.ServiceError as e:
        if e.status == 404:
            # Resource already gone: treat the delete as successfully completed.
            return WAIT_RESOURCE_NOT_FOUND
        raise  # bare raise preserves the original traceback

    if not wait_for_states:
        return operation_result

    lowered_wait_for_states = [w.lower() for w in wait_for_states]
    # The asynchronous work request to poll is identified in the response headers.
    wait_for_resource_id = operation_result.headers['opc-work-request-id']
    try:
        return oci.wait_until(
            self.client,
            self.client.get_work_request(wait_for_resource_id),
            evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
            **waiter_kwargs
        )
    except Exception as e:
        # Surface the already-completed delete result alongside the waiter failure.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def delete_host_port_scan_result_and_wait_for_state(self, host_port_scan_result_id, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.delete_host_port_scan_result` and waits for the :py:class:`~oci.vulnerability_scanning.models.WorkRequest`
    to enter the given state(s).

    :param str host_port_scan_result_id: (required)
        unique host port scan identifier

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for :py:attr:`~oci.vulnerability_scanning.models.WorkRequest.status`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.delete_host_port_scan_result`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
        as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
    """
    # None sentinels instead of mutable default arguments; behavior unchanged.
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}

    operation_result = None
    try:
        operation_result = self.client.delete_host_port_scan_result(host_port_scan_result_id, **operation_kwargs)
    except oci.exceptions.ServiceError as e:
        if e.status == 404:
            # Resource already gone: treat the delete as successfully completed.
            return WAIT_RESOURCE_NOT_FOUND
        raise  # bare raise preserves the original traceback

    if not wait_for_states:
        return operation_result

    lowered_wait_for_states = [w.lower() for w in wait_for_states]
    # The asynchronous work request to poll is identified in the response headers.
    wait_for_resource_id = operation_result.headers['opc-work-request-id']
    try:
        return oci.wait_until(
            self.client,
            self.client.get_work_request(wait_for_resource_id),
            evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
            **waiter_kwargs
        )
    except Exception as e:
        # Surface the already-completed delete result alongside the waiter failure.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def delete_host_scan_recipe_and_wait_for_state(self, host_scan_recipe_id, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.delete_host_scan_recipe` and waits for the :py:class:`~oci.vulnerability_scanning.models.WorkRequest`
    to enter the given state(s).

    :param str host_scan_recipe_id: (required)
        unique HostScanRecipe identifier

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for :py:attr:`~oci.vulnerability_scanning.models.WorkRequest.status`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.delete_host_scan_recipe`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
        as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
    """
    # None sentinels instead of mutable default arguments; behavior unchanged.
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}

    operation_result = None
    try:
        operation_result = self.client.delete_host_scan_recipe(host_scan_recipe_id, **operation_kwargs)
    except oci.exceptions.ServiceError as e:
        if e.status == 404:
            # Resource already gone: treat the delete as successfully completed.
            return WAIT_RESOURCE_NOT_FOUND
        raise  # bare raise preserves the original traceback

    if not wait_for_states:
        return operation_result

    lowered_wait_for_states = [w.lower() for w in wait_for_states]
    # The asynchronous work request to poll is identified in the response headers.
    wait_for_resource_id = operation_result.headers['opc-work-request-id']
    try:
        return oci.wait_until(
            self.client,
            self.client.get_work_request(wait_for_resource_id),
            evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
            **waiter_kwargs
        )
    except Exception as e:
        # Surface the already-completed delete result alongside the waiter failure.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def delete_host_scan_target_and_wait_for_state(self, host_scan_target_id, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.delete_host_scan_target` and waits for the :py:class:`~oci.vulnerability_scanning.models.WorkRequest`
    to enter the given state(s).

    :param str host_scan_target_id: (required)
        unique HostScanTarget identifier

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for :py:attr:`~oci.vulnerability_scanning.models.WorkRequest.status`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.delete_host_scan_target`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
        as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
    """
    # None sentinels instead of mutable default arguments; behavior unchanged.
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}

    operation_result = None
    try:
        operation_result = self.client.delete_host_scan_target(host_scan_target_id, **operation_kwargs)
    except oci.exceptions.ServiceError as e:
        if e.status == 404:
            # Resource already gone: treat the delete as successfully completed.
            return WAIT_RESOURCE_NOT_FOUND
        raise  # bare raise preserves the original traceback

    if not wait_for_states:
        return operation_result

    lowered_wait_for_states = [w.lower() for w in wait_for_states]
    # The asynchronous work request to poll is identified in the response headers.
    wait_for_resource_id = operation_result.headers['opc-work-request-id']
    try:
        return oci.wait_until(
            self.client,
            self.client.get_work_request(wait_for_resource_id),
            evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
            **waiter_kwargs
        )
    except Exception as e:
        # Surface the already-completed delete result alongside the waiter failure.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def update_container_scan_recipe_and_wait_for_state(self, container_scan_recipe_id, update_container_scan_recipe_details, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.update_container_scan_recipe` and waits for the :py:class:`~oci.vulnerability_scanning.models.WorkRequest`
    to enter the given state(s).

    :param str container_scan_recipe_id: (required)
        unique ContainerScanRecipe identifier

    :param oci.vulnerability_scanning.models.UpdateContainerScanRecipeDetails update_container_scan_recipe_details: (required)
        The information to be updated.

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for :py:attr:`~oci.vulnerability_scanning.models.WorkRequest.status`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.update_container_scan_recipe`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
        as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
    """
    # None sentinels instead of mutable default arguments; behavior unchanged.
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}

    operation_result = self.client.update_container_scan_recipe(container_scan_recipe_id, update_container_scan_recipe_details, **operation_kwargs)
    if not wait_for_states:
        return operation_result

    lowered_wait_for_states = [w.lower() for w in wait_for_states]
    # The asynchronous work request to poll is identified in the response headers.
    wait_for_resource_id = operation_result.headers['opc-work-request-id']
    try:
        return oci.wait_until(
            self.client,
            self.client.get_work_request(wait_for_resource_id),
            evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
            **waiter_kwargs
        )
    except Exception as e:
        # Surface the already-completed update result alongside the waiter failure.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def update_container_scan_target_and_wait_for_state(self, container_scan_target_id, update_container_scan_target_details, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.update_container_scan_target` and waits for the :py:class:`~oci.vulnerability_scanning.models.WorkRequest`
    to enter the given state(s).

    :param str container_scan_target_id: (required)
        unique ContainerScanTarget identifier

    :param oci.vulnerability_scanning.models.UpdateContainerScanTargetDetails update_container_scan_target_details: (required)
        The information to be updated.

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for :py:attr:`~oci.vulnerability_scanning.models.WorkRequest.status`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.update_container_scan_target`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
        as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
    """
    # None sentinels instead of mutable default arguments; behavior unchanged.
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}

    operation_result = self.client.update_container_scan_target(container_scan_target_id, update_container_scan_target_details, **operation_kwargs)
    if not wait_for_states:
        return operation_result

    lowered_wait_for_states = [w.lower() for w in wait_for_states]
    # The asynchronous work request to poll is identified in the response headers.
    wait_for_resource_id = operation_result.headers['opc-work-request-id']
    try:
        return oci.wait_until(
            self.client,
            self.client.get_work_request(wait_for_resource_id),
            evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
            **waiter_kwargs
        )
    except Exception as e:
        # Surface the already-completed update result alongside the waiter failure.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def update_host_scan_recipe_and_wait_for_state(self, host_scan_recipe_id, update_host_scan_recipe_details, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.update_host_scan_recipe` and waits for the :py:class:`~oci.vulnerability_scanning.models.WorkRequest`
    to enter the given state(s).

    :param str host_scan_recipe_id: (required)
        unique HostScanRecipe identifier

    :param oci.vulnerability_scanning.models.UpdateHostScanRecipeDetails update_host_scan_recipe_details: (required)
        The information to be updated.

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for :py:attr:`~oci.vulnerability_scanning.models.WorkRequest.status`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.update_host_scan_recipe`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
        as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
    """
    # None sentinels instead of mutable default arguments; behavior unchanged.
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}

    operation_result = self.client.update_host_scan_recipe(host_scan_recipe_id, update_host_scan_recipe_details, **operation_kwargs)
    if not wait_for_states:
        return operation_result

    lowered_wait_for_states = [w.lower() for w in wait_for_states]
    # The asynchronous work request to poll is identified in the response headers.
    wait_for_resource_id = operation_result.headers['opc-work-request-id']
    try:
        return oci.wait_until(
            self.client,
            self.client.get_work_request(wait_for_resource_id),
            evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
            **waiter_kwargs
        )
    except Exception as e:
        # Surface the already-completed update result alongside the waiter failure.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def update_host_scan_target_and_wait_for_state(self, host_scan_target_id, update_host_scan_target_details, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.update_host_scan_target` and waits for the :py:class:`~oci.vulnerability_scanning.models.WorkRequest`
    to enter the given state(s).

    :param str host_scan_target_id: (required)
        unique HostScanTarget identifier

    :param oci.vulnerability_scanning.models.UpdateHostScanTargetDetails update_host_scan_target_details: (required)
        The information to be updated.

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for :py:attr:`~oci.vulnerability_scanning.models.WorkRequest.status`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.update_host_scan_target`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
        as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
    """
    # None sentinels instead of mutable default arguments; behavior unchanged.
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}

    operation_result = self.client.update_host_scan_target(host_scan_target_id, update_host_scan_target_details, **operation_kwargs)
    if not wait_for_states:
        return operation_result

    lowered_wait_for_states = [w.lower() for w in wait_for_states]
    # The asynchronous work request to poll is identified in the response headers.
    wait_for_resource_id = operation_result.headers['opc-work-request-id']
    try:
        return oci.wait_until(
            self.client,
            self.client.get_work_request(wait_for_resource_id),
            evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
            **waiter_kwargs
        )
    except Exception as e:
        # Surface the already-completed update result alongside the waiter failure.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
| 53.74702
| 245
| 0.699524
| 5,128
| 40,579
| 5.256825
| 0.042902
| 0.03999
| 0.049189
| 0.046741
| 0.951441
| 0.942278
| 0.927551
| 0.922432
| 0.905887
| 0.901361
| 0
| 0.001534
| 0.228764
| 40,579
| 754
| 246
| 53.818302
| 0.859822
| 0.453634
| 0
| 0.843137
| 0
| 0
| 0.02598
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.05042
| false
| 0
| 0.005602
| 0
| 0.179272
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
18bd6765a1b678979ac89fd1b185feee4306eb48
| 956
|
py
|
Python
|
tests/test_snorse.py
|
pipermerriam/snorse
|
659e3585ea8cdefbeb22bf43883362bd30830376
|
[
"MIT"
] | 3
|
2015-07-16T05:18:33.000Z
|
2019-04-16T21:47:41.000Z
|
tests/test_snorse.py
|
pipermerriam/snorse
|
659e3585ea8cdefbeb22bf43883362bd30830376
|
[
"MIT"
] | null | null | null |
tests/test_snorse.py
|
pipermerriam/snorse
|
659e3585ea8cdefbeb22bf43883362bd30830376
|
[
"MIT"
] | null | null | null |
from __future__ import unicode_literals
import pytest
import snorse
# Snowman Morse symbols: a snowman for "dot", three snow-less snowmen for "dash".
DOT = "\u2603"
DASH = "\u26C4\u26C4\u26C4"

# Build the expected encodings from the constants above instead of repeating
# opaque escape-sequence literals (the constants were previously unused).
# Morse: a = ".-", b = "-...", c = "-.-."; symbols are space-separated.
_A = " ".join([DOT, DASH])
_B = " ".join([DASH, DOT, DOT, DOT])
_C = " ".join([DASH, DOT, DASH, DOT])
_ABC = " ".join([_A, _B, _C])


@pytest.mark.parametrize(
    'text,expected',
    (
        ('', ''),                        # empty input maps to empty output
        ('a', _A),
        ('abc', _ABC),
        ('abc abc', " ".join([_ABC, _ABC])),
        ('a b c', _ABC),                 # input spaces are ignored
        ('a&', _A + ' &'),               # non-letters pass through unchanged
    ),
)
def test_conversion(text, expected):
    """snorse.snorse encodes latin text as snowman Morse code."""
    actual = snorse.snorse(text)
    assert actual == expected
| 39.833333
| 273
| 0.633891
| 120
| 956
| 5
| 0.2
| 0.7
| 0.675
| 0.466667
| 0.681667
| 0.613333
| 0.613333
| 0.613333
| 0.613333
| 0.613333
| 0
| 0.363281
| 0.196653
| 956
| 23
| 274
| 41.565217
| 0.417969
| 0
| 0
| 0.105263
| 0
| 0.157895
| 0.635983
| 0
| 0
| 0
| 0
| 0
| 0.052632
| 1
| 0.052632
| false
| 0
| 0.157895
| 0
| 0.210526
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
18d434ec416199db163c26a185b593bd2b11d239
| 119
|
py
|
Python
|
01_Day_Introduction/2.py
|
diegofregolente/30-Days-Of-Python
|
e0cad31f6d5ab1384ad6fa5a5d24a84771d6c267
|
[
"Apache-2.0"
] | null | null | null |
01_Day_Introduction/2.py
|
diegofregolente/30-Days-Of-Python
|
e0cad31f6d5ab1384ad6fa5a5d24a84771d6c267
|
[
"Apache-2.0"
] | null | null | null |
01_Day_Introduction/2.py
|
diegofregolente/30-Days-Of-Python
|
e0cad31f6d5ab1384ad6fa5a5d24a84771d6c267
|
[
"Apache-2.0"
] | null | null | null |
# Demonstrate Python's arithmetic operators applied to the operands 3 and 4.
print(
    3 + 4,   # addition
    3 - 4,   # subtraction
    3 * 4,   # multiplication
    3 / 4,   # true division (always a float)
    3 ** 4,  # exponentiation
    3 // 4,  # floor division
    3 % 4)  # 2 (modulus / remainder)
| 13.222222
| 19
| 0.168067
| 16
| 119
| 1.25
| 0.25
| 0.7
| 0.9
| 1.2
| 0.7
| 0.7
| 0.7
| 0.7
| 0.7
| 0.7
| 0
| 0.394737
| 0.680672
| 119
| 8
| 20
| 14.875
| 0.131579
| 0.008403
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.125
| 1
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
18ef518ed288be9908803659afb8b662ba1f1c8f
| 27,797
|
py
|
Python
|
login/tests/test_client.py
|
shubhamkulkarni01/EMSTrack-Django
|
32ff9ed94a38730c0e9f6385c75060e2d30a930e
|
[
"MIT",
"BSD-3-Clause"
] | 2
|
2020-07-16T01:44:54.000Z
|
2020-10-25T02:08:47.000Z
|
login/tests/test_client.py
|
shubhamkulkarni01/EMSTrack-Django
|
32ff9ed94a38730c0e9f6385c75060e2d30a930e
|
[
"MIT",
"BSD-3-Clause"
] | 8
|
2020-04-20T22:13:56.000Z
|
2022-02-04T17:50:44.000Z
|
login/tests/test_client.py
|
shubhamkulkarni01/EMSTrack-Django
|
32ff9ed94a38730c0e9f6385c75060e2d30a930e
|
[
"MIT",
"BSD-3-Clause"
] | 2
|
2020-07-20T23:39:44.000Z
|
2022-02-24T00:29:10.000Z
|
import json
import logging
from io import BytesIO
from django.conf import settings
from django.core.exceptions import PermissionDenied
from django.db import transaction
from django.test import Client as DjangoClient
from rest_framework.parsers import JSONParser
from ambulance.models import Ambulance
from emstrack.tests.util import date2iso
from hospital.models import Hospital
from login.models import Client, ClientStatus, ClientLog, ClientActivity
from login.serializers import ClientSerializer
from login.tests.setup_data import TestSetup
logger = logging.getLogger(__name__)
class TestClient(TestSetup):
    """Tests for the Client model, its ClientLog trail, serializer, and API viewset.

    Saving a Client with a new status/ambulance/hospital is expected to emit
    ClientLog entries as a side effect (activities: HS handshake, AI/AO
    ambulance login/logout, HI/HO hospital login/logout).  The helpers below
    factor out the repeated "check current state + check newest log entries"
    assertion sequences.
    """

    def _assert_client(self, client, status, ambulance=None, hospital=None):
        """Assert the client's current status and associations."""
        self.assertEqual(client.status, status)
        self.assertEqual(client.ambulance, ambulance)
        self.assertEqual(client.hospital, hospital)

    def _assert_logs(self, client, expected, total):
        """Assert the newest log entries and the total log count.

        expected: list of (status, activity, details) tuples, newest first.
        total: expected total number of ClientLog rows for this client.
        """
        logs = ClientLog.objects.filter(client=client).order_by('-updated_on')
        for log, (status, activity, details) in zip(logs, expected):
            self.assertEqual(log.client, client)
            self.assertEqual(log.status, status)
            self.assertEqual(log.activity, activity)
            self.assertEqual(log.details, details)
        self.assertEqual(ClientLog.objects.filter(client=client).count(), total)

    @staticmethod
    def _parse(response):
        """Deserialize a JSON HTTP response body."""
        return JSONParser().parse(BytesIO(response.content))

    def testAmbulance(self):
        O, F = ClientStatus.O.name, ClientStatus.F.name
        HS = ClientActivity.HS.name
        AI = ClientActivity.AI.name
        AO = ClientActivity.AO.name

        # client online
        client1 = Client.objects.create(client_id='client_id_1', user=self.u1,
                                        status=O)
        self._assert_client(client1, O)
        self._assert_logs(client1, [(O, HS, '')], 1)

        # go offline
        client1.status = F
        client1.save()
        client1 = Client.objects.get(id=client1.id)
        self._assert_client(client1, F)
        self._assert_logs(client1, [(F, HS, '')], 2)

        # go online with ambulance
        client1.status = O
        client1.ambulance = self.a1
        client1.save()
        a = Ambulance.objects.get(id=self.a1.id)
        client1 = Client.objects.get(id=client1.id)
        self._assert_client(client1, O, ambulance=self.a1)
        self.assertEqual(a.client, client1)
        self._assert_logs(client1,
                          [(O, AI, self.a1.identifier), (O, HS, '')], 4)

        # go offline; ambulance is logged out automatically
        client1.status = F
        client1.save()
        a = Ambulance.objects.get(id=self.a1.id)
        client1 = Client.objects.get(id=client1.id)
        self._assert_client(client1, F)
        self.assertFalse(hasattr(a, 'client'))
        self._assert_logs(client1,
                          [(F, HS, ''), (F, AO, self.a1.identifier)], 6)

        # client online
        client1.status = O
        client1.save()
        a = Ambulance.objects.get(id=self.a1.id)
        client1 = Client.objects.get(id=client1.id)
        self._assert_client(client1, O)
        self._assert_logs(client1, [(O, HS, '')], 7)

        # login ambulance a1
        client1.ambulance = self.a1
        client1.save()
        a = Ambulance.objects.get(id=self.a1.id)
        client1 = Client.objects.get(id=client1.id)
        self._assert_client(client1, O, ambulance=self.a1)
        self.assertEqual(a.client, client1)
        self._assert_logs(client1,
                          [(O, AI, self.a1.identifier), (O, HS, '')], 9)

        # logout ambulance
        client1.ambulance = None
        client1.save()
        a = Ambulance.objects.get(id=self.a1.id)
        client1 = Client.objects.get(id=client1.id)
        self._assert_client(client1, O)
        self.assertFalse(hasattr(a, 'client'))
        self._assert_logs(client1,
                          [(O, AO, self.a1.identifier), (O, HS, '')], 11)

        # login ambulance a2
        client1.ambulance = self.a2
        client1.save()
        a = Ambulance.objects.get(id=self.a2.id)
        client1 = Client.objects.get(id=client1.id)
        self._assert_client(client1, O, ambulance=self.a2)
        self.assertEqual(a.client, client1)
        self._assert_logs(client1,
                          [(O, AI, self.a2.identifier), (O, HS, '')], 13)

        # go offline
        client1.status = F
        client1.save()
        a = Ambulance.objects.get(id=self.a2.id)
        client1 = Client.objects.get(id=client1.id)
        self._assert_client(client1, F)
        self.assertFalse(hasattr(a, 'client'))
        self._assert_logs(client1,
                          [(F, HS, ''), (F, AO, self.a2.identifier)], 15)

    def testHospital(self):
        O, F = ClientStatus.O.name, ClientStatus.F.name
        HS = ClientActivity.HS.name
        HI = ClientActivity.HI.name
        HO = ClientActivity.HO.name

        # client online
        client1 = Client.objects.create(client_id='client_id_1', user=self.u1,
                                        status=O)
        self._assert_client(client1, O)
        self._assert_logs(client1, [(O, HS, '')], 1)

        # go offline
        client1.status = F
        client1.save()
        client1 = Client.objects.get(id=client1.id)
        self._assert_client(client1, F)
        self._assert_logs(client1, [(F, HS, '')], 2)

        # go online with hospital
        client1.status = O
        client1.hospital = self.h1
        client1.save()
        h = Hospital.objects.get(id=self.h1.id)
        client1 = Client.objects.get(id=client1.id)
        self._assert_client(client1, O, hospital=self.h1)
        self.assertEqual(h.client, client1)
        self._assert_logs(client1,
                          [(O, HI, self.h1.name), (O, HS, '')], 4)

        # go offline; hospital is logged out automatically
        client1.status = F
        client1.save()
        h = Hospital.objects.get(id=self.h1.id)
        client1 = Client.objects.get(id=client1.id)
        self._assert_client(client1, F)
        self.assertFalse(hasattr(h, 'client'))
        self._assert_logs(client1,
                          [(F, HS, ''), (F, HO, self.h1.name)], 6)

        # client online
        client1.status = O
        client1.save()
        client1 = Client.objects.get(id=client1.id)
        self._assert_client(client1, O)
        self._assert_logs(client1, [(O, HS, '')], 7)

        # login hospital h1
        client1.hospital = self.h1
        client1.save()
        h = Hospital.objects.get(id=self.h1.id)
        client1 = Client.objects.get(id=client1.id)
        self._assert_client(client1, O, hospital=self.h1)
        self.assertEqual(h.client, client1)
        self._assert_logs(client1,
                          [(O, HI, self.h1.name), (O, HS, '')], 9)

        # logout hospital
        client1.hospital = None
        client1.save()
        h = Hospital.objects.get(id=self.h1.id)
        client1 = Client.objects.get(id=client1.id)
        self._assert_client(client1, O)
        self.assertFalse(hasattr(h, 'client'))
        self._assert_logs(client1,
                          [(O, HO, self.h1.name), (O, HS, '')], 11)

        # login hospital h2
        client1.hospital = self.h2
        client1.save()
        h = Hospital.objects.get(id=self.h2.id)
        client1 = Client.objects.get(id=client1.id)
        self._assert_client(client1, O, hospital=self.h2)
        self.assertEqual(h.client, client1)
        self._assert_logs(client1,
                          [(O, HI, self.h2.name), (O, HS, '')], 13)

        # go offline
        client1.status = F
        client1.save()
        h = Hospital.objects.get(id=self.h2.id)
        client1 = Client.objects.get(id=client1.id)
        self._assert_client(client1, F)
        self.assertFalse(hasattr(h, 'client'))
        self._assert_logs(client1,
                          [(F, HS, ''), (F, HO, self.h2.name)], 15)

    def testPermissions(self):
        """Clients cannot attach to ambulances/hospitals their user lacks."""
        cases = [
            {'user': self.u2, 'ambulance': self.a1},
            {'user': self.u2, 'ambulance': self.a1, 'hospital': self.h1},
            {'user': self.u3, 'hospital': self.h1},
            {'user': self.u3, 'ambulance': self.a1, 'hospital': self.h1},
        ]
        for kwargs in cases:
            with self.assertRaises(PermissionDenied):
                # atomic() so the failed create does not poison the test
                # transaction for the next case
                with transaction.atomic():
                    Client.objects.create(client_id='client_id_1',
                                          status=ClientStatus.O.name,
                                          **kwargs)

    def testClientSerializer(self):
        # serialization of an online client attached to an ambulance
        client1 = Client.objects.create(client_id='client_id_1', user=self.u1,
                                        status=ClientStatus.O.name,
                                        ambulance=self.a1)
        serializer = ClientSerializer(client1)
        expected = {
            'client_id': client1.client_id,
            'username': client1.user.username,
            'status': client1.status,
            'ambulance': client1.ambulance.id,
            'hospital': None,
            'updated_on': date2iso(client1.updated_on)
        }
        self.assertDictEqual(serializer.data, expected)

        # deserialize and create a client with no associations
        serializer = ClientSerializer(data={
            'client_id': 'client_id_3',
            'status': ClientStatus.O.name,
            'ambulance': None,
            'hospital': None
        })
        # fail with the serializer errors in the message rather than a
        # bare assertTrue(False)
        self.assertTrue(serializer.is_valid(), serializer.errors)
        serializer.save(user=self.u2)
        client2 = Client.objects.get(client_id='client_id_3')
        self.assertEqual(client2.status, ClientStatus.O.name)
        self.assertEqual(client2.user, self.u2)
        self.assertEqual(client2.ambulance, None)
        self.assertEqual(client2.hospital, None)

        # deserialize and create a client attached to an ambulance
        serializer = ClientSerializer(data={
            'client_id': 'client_id_4',
            'status': ClientStatus.O.name,
            'ambulance': self.a2.id,
            'hospital': None
        })
        self.assertTrue(serializer.is_valid(), serializer.errors)
        serializer.save(user=self.u1)
        client2 = Client.objects.get(client_id='client_id_4')
        self.assertEqual(client2.status, ClientStatus.O.name)
        self.assertEqual(client2.user, self.u1)
        self.assertEqual(client2.ambulance, self.a2)
        self.assertEqual(client2.hospital, None)

        # partial update: attach a hospital, everything else unchanged
        serializer = ClientSerializer(data={
            'client_id': 'client_id_4',
            'hospital': self.h1.id
        }, partial=True)
        self.assertTrue(serializer.is_valid(), serializer.errors)
        serializer.save(user=self.u1)
        client2 = Client.objects.get(client_id='client_id_4')
        self.assertEqual(client2.status, ClientStatus.O.name)
        self.assertEqual(client2.user, self.u1)
        self.assertEqual(client2.ambulance, self.a2)
        self.assertEqual(client2.hospital, self.h1)

    def test_client_viewset(self):
        # instantiate HTTP test client and login as admin
        client = DjangoClient()
        client.login(username=settings.MQTT['USERNAME'],
                     password=settings.MQTT['PASSWORD'])

        # create client online
        client1 = Client.objects.create(client_id='client_id_1', user=self.u2,
                                        status=ClientStatus.O.name)

        # retrieve
        response = client.get('/en/api/client/{}/'.format(client1.client_id),
                              follow=True)
        self.assertEqual(response.status_code, 200)
        result = self._parse(response)
        self.assertDictEqual(result, ClientSerializer(client1).data)
        self.assertEqual(result['username'], self.u2.username)

        # set status, ambulance and hospital
        status = ClientStatus.O.name
        response = client.patch('/en/api/client/{}/'.format(client1.client_id),
                                content_type='application/json',
                                data=json.dumps({
                                    'status': status,
                                    'ambulance': self.a1.id,
                                    'hospital': self.h1.id
                                }),
                                follow=True)
        self.assertEqual(response.status_code, 200)
        result = self._parse(response)
        answer = ClientSerializer(Client.objects.get(id=client1.id)).data
        self.assertDictEqual(result, answer)

        # retrieve new status
        response = client.get('/en/api/client/{}/'.format(client1.client_id),
                              follow=True)
        self.assertEqual(response.status_code, 200)
        result = self._parse(response)
        self.assertEqual(result['status'], status)
        self.assertEqual(result['ambulance'], self.a1.id)
        self.assertEqual(result['hospital'], self.h1.id)
        self.assertEqual(result['username'], self.u1.username)

        # reset ambulance
        response = client.patch('/en/api/client/{}/'.format(client1.client_id),
                                content_type='application/json',
                                data=json.dumps({
                                    'ambulance': None
                                }),
                                follow=True)
        self.assertEqual(response.status_code, 200)
        result = self._parse(response)
        answer = ClientSerializer(Client.objects.get(id=client1.id)).data
        self.assertDictEqual(result, answer)

        # retrieve new status
        response = client.get('/en/api/client/{}/'.format(client1.client_id),
                              follow=True)
        self.assertEqual(response.status_code, 200)
        result = self._parse(response)
        self.assertEqual(result['status'], status)
        self.assertEqual(result['ambulance'], None)
        self.assertEqual(result['hospital'], self.h1.id)
        self.assertEqual(result['username'], self.u1.username)

        # invalid status value is rejected
        response = client.patch('/en/api/client/{}/'.format(client1.client_id),
                                content_type='application/json',
                                data=json.dumps({
                                    'status': 'will fail'
                                }),
                                follow=True)
        self.assertEqual(response.status_code, 400)

        # unknown client id
        response = client.patch('/en/api/client/100/',
                                data=json.dumps({
                                    'status': status
                                }),
                                follow=True)
        self.assertEqual(response.status_code, 404)

        # create client
        response = client.post('/en/api/client/',
                               content_type='application/json',
                               data=json.dumps({
                                   'client_id': 'client_id_2',
                                   'status': ClientStatus.O.name,
                                   'ambulance': None,
                                   'hospital': self.h2.id
                               }),
                               follow=True)
        self.assertEqual(response.status_code, 201)
        result = self._parse(response)
        answer = ClientSerializer(
            Client.objects.get(client_id='client_id_2')).data
        self.assertDictEqual(result, answer)

        # retrieve client
        response = client.get('/en/api/client/{}/'.format('client_id_2'),
                              follow=True)
        self.assertEqual(response.status_code, 200)
        result = self._parse(response)
        self.assertEqual(result['status'], ClientStatus.O.name)
        self.assertEqual(result['ambulance'], None)
        self.assertEqual(result['hospital'], self.h2.id)
        self.assertEqual(result['username'], self.u1.username)

        # posting an existing client_id acts as an update
        response = client.post('/en/api/client/',
                               content_type='application/json',
                               data=json.dumps({
                                   'client_id': 'client_id_2',
                                   'status': ClientStatus.O.name,
                                   'ambulance': self.a1.id
                               }),
                               follow=True)
        self.assertEqual(response.status_code, 201)
        result = self._parse(response)
        answer = ClientSerializer(
            Client.objects.get(client_id='client_id_2')).data
        self.assertDictEqual(result, answer)

        # retrieve client
        response = client.get('/en/api/client/{}/'.format('client_id_2'),
                              follow=True)
        self.assertEqual(response.status_code, 200)
        result = self._parse(response)
        self.assertEqual(result['status'], ClientStatus.O.name)
        self.assertEqual(result['ambulance'], self.a1.id)
        self.assertEqual(result['hospital'], self.h2.id)
        self.assertEqual(result['username'], self.u1.username)

        # logout
        client.logout()
| 40.998525
| 102
| 0.632514
| 3,004
| 27,797
| 5.807257
| 0.051598
| 0.204643
| 0.123818
| 0.075666
| 0.921009
| 0.912926
| 0.905704
| 0.896819
| 0.894124
| 0.883577
| 0
| 0.019815
| 0.248372
| 27,797
| 677
| 103
| 41.059084
| 0.815153
| 0.022017
| 0
| 0.873494
| 0
| 0
| 0.046489
| 0
| 0
| 0
| 0
| 0
| 0.516064
| 1
| 0.01004
| false
| 0.002008
| 0.028112
| 0
| 0.040161
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
bef78cb0897d54e08313ba0e5ceec9391aad202d
| 198
|
py
|
Python
|
buildpacks/python.py
|
RoboEpics/packman
|
5bec7e153a266210ca53f2592af4b395257ad7aa
|
[
"MIT"
] | 2
|
2020-06-23T22:07:57.000Z
|
2020-07-24T19:42:42.000Z
|
buildpacks/python.py
|
RoboEpics/packman
|
5bec7e153a266210ca53f2592af4b395257ad7aa
|
[
"MIT"
] | 9
|
2021-03-30T13:41:24.000Z
|
2021-12-14T14:51:00.000Z
|
buildpacks/python.py
|
RoboEpics/packman
|
5bec7e153a266210ca53f2592af4b395257ad7aa
|
[
"MIT"
] | 1
|
2020-07-24T19:50:26.000Z
|
2020-07-24T19:50:26.000Z
|
from repo2docker.buildpacks.python import PythonBuildPack
from .conda import PythonRunCommandDetectorMixin
class ModifiedPythonBuildPack(PythonRunCommandDetectorMixin, PythonBuildPack):
    # repo2docker's PythonBuildPack composed with the local
    # PythonRunCommandDetectorMixin; no behavior is added here beyond the MRO.
    pass
| 24.75
| 78
| 0.868687
| 15
| 198
| 11.466667
| 0.733333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005587
| 0.09596
| 198
| 7
| 79
| 28.285714
| 0.955307
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
55c54610582d8b4813c5ceff9e92422c937f39d0
| 15,009
|
py
|
Python
|
tests/intensive/video_tests.py
|
stephengmatthews/fiftyone
|
cd13869ce589644ddfc19693905f6783b3202f4d
|
[
"Apache-2.0"
] | 3
|
2022-01-18T06:13:33.000Z
|
2022-02-14T13:28:23.000Z
|
tests/intensive/video_tests.py
|
3Demonica/fiftyone
|
cd13869ce589644ddfc19693905f6783b3202f4d
|
[
"Apache-2.0"
] | null | null | null |
tests/intensive/video_tests.py
|
3Demonica/fiftyone
|
cd13869ce589644ddfc19693905f6783b3202f4d
|
[
"Apache-2.0"
] | null | null | null |
"""
Video tests.
You must run these tests interactively as follows::
pytest tests/intensive/video_tests.py -s -k <test_case>
| Copyright 2017-2022, Voxel51, Inc.
| `voxel51.com <https://voxel51.com/>`_
|
"""
import random
import unittest
import fiftyone as fo
import fiftyone.zoo as foz
from fiftyone import ViewField as F
def test_to_clips():
    """Interactively exercise clip views (``to_clips()``) of a video dataset.

    Prints the observable state after each operation so a human can verify
    which operations propagate to the source dataset (label tags, saved view
    edits, sample field additions) and which do not (sample tags on the view).
    """
    dataset = foz.load_zoo_dataset("quickstart-video").clone()
    dataset.tag_samples("test")

    clips = dataset.filter_labels(
        "frames.detections", F("label") == "road sign"
    ).to_clips("frames.detections")

    # Clips inherit sample tags
    print(dataset.count_sample_tags())
    print(clips.count_sample_tags())

    clips.tag_samples("clips")

    # Tagging clips has no effect on the source dataset
    print(clips.count_sample_tags())
    print(dataset.count_sample_tags())

    clips.untag_samples("clips")

    # Untagging clips has no effect on the source dataset
    print(clips.count_sample_tags())
    print(dataset.count_sample_tags())

    clips.tag_labels("test")

    # Tagging labels applies to the source dataset
    print(clips.count_label_tags())
    print(dataset.count_label_tags())
    print(dataset.count_values("frames.detections.detections.tags"))

    clips.untag_labels("test")

    # Untagging labels applies to the source dataset
    print(clips.count_label_tags())
    print(dataset.count_label_tags())
    print(dataset.count_values("frames.detections.detections.tags"))

    view = clips.limit(1).set_field(
        "frames.detections.detections.label", F("label").upper()
    )

    # Views can be created, but don't affect source dataset until saved
    print(clips.count("frames.detections.detections"))
    print(view.count("frames.detections.detections"))
    print(dataset.count("frames.detections.detections"))
    print(clips.count_values("frames.detections.detections.label"))
    print(view.count_values("frames.detections.detections.label"))
    print(dataset.count_values("frames.detections.detections.label"))

    view.save()

    # Changes and deletions are synced with source dataset
    print(clips.count("frames.detections.detections"))
    print(view.count("frames.detections.detections"))
    print(dataset.count("frames.detections.detections"))
    print(clips.count_values("frames.detections.detections.label"))
    print(view.count_values("frames.detections.detections.label"))
    print(dataset.count_values("frames.detections.detections.label"))

    # Ensure that data is correctly formed
    print(clips.first().frames.first().id)
    print(dataset.first().frames.first().id)

    sample = clips.first()
    sample["foo"] = "bar"
    sample.frames.first()["hello"] = "world"
    sample.save()

    # Frame fields can be added by direct sample modification, and are saved
    print(clips)
    print(dataset)
    print(clips.count_values("foo"))
    print(clips.count_values("frames.hello"))
    print(dataset.count_values("frames.hello"))

    sample = clips.exclude_fields("frames.detections").first()
    sample.frames.first()["hello2"] = "world2"
    sample.save()

    # Excluded fields are not removed when saving clip samples
    print(clips)
    print(dataset)
    print(clips.count_values("frames.detections.detections.label"))
    print(clips.count_values("frames.hello2"))
    print(dataset.count_values("frames.detections.detections.label"))
    print(dataset.count_values("frames.hello2"))

    dataset.untag_samples("test")
    clips.reload()

    # Reloading a view syncs it with the source dataset
    print(dataset.count_sample_tags())
    print(clips.count_sample_tags())  # empty because labels were capitalized!
def test_to_frames():
    """Interactively exercise frame views (``to_frames()``) of a video dataset.

    Prints the observable state after each operation so a human can verify
    which operations propagate to the source dataset and which stay local
    to the view.
    """
    dataset = foz.load_zoo_dataset("quickstart-video").clone()
    dataset.tag_samples("test")

    frames = dataset.to_frames()

    # Frames inherit sample tags
    print(dataset.count_sample_tags())
    print(frames.count_sample_tags())

    frames.tag_samples("frames")

    # Tagging frames has no effect on the source dataset
    print(frames.count_sample_tags())
    print(dataset.count_sample_tags())

    frames.untag_samples("frames")

    # Untagging frames has no effect on the source dataset
    print(frames.count_sample_tags())
    print(dataset.count_sample_tags())

    frames.tag_labels("test")

    # Tagging labels applies to the source dataset
    print(frames.count_label_tags())
    print(dataset.count_label_tags())
    print(dataset.count_values("frames.detections.detections.tags"))

    frames.untag_labels("test")

    # Untagging labels applies to the source dataset
    print(frames.count_label_tags())
    print(dataset.count_label_tags())
    print(dataset.count_values("frames.detections.detections.tags"))

    view = frames.limit(100).set_field(
        "detections.detections.label", F("label").upper()
    )

    # Views can be created, but don't affect source dataset until saved
    print(frames.count("detections.detections"))
    print(view.count("detections.detections"))
    print(dataset.count("frames.detections.detections"))
    print(frames.count_values("detections.detections.label"))
    print(view.count_values("detections.detections.label"))
    print(dataset.count_values("frames.detections.detections.label"))

    view.save()

    # Changes and deletions are synced with source dataset
    print(frames.count("detections.detections"))
    print(view.count("detections.detections"))
    print(dataset.count("frames.detections.detections"))
    print(frames.count_values("detections.detections.label"))
    print(view.count_values("detections.detections.label"))
    print(dataset.count_values("frames.detections.detections.label"))

    # Ensure that data is correctly formed
    print(frames.first().sample_id)
    print(dataset.first().frames.first().id)

    sample = frames.first()
    sample["hello"] = "world"
    sample.save()

    # Fields can be added by direct sample modification, and are saved
    print(frames)
    print(dataset)
    print(frames.count_values("hello"))
    print(dataset.count_values("frames.hello"))

    sample = frames.exclude_fields("detections").first()
    sample["hello2"] = "world2"
    sample.save()

    # Excluded fields are not removed when saving frame samples
    print(frames)
    print(dataset)
    print(frames.count_values("detections.detections.label"))
    print(frames.count_values("hello2"))
    print(dataset.count_values("frames.hello2"))
    print(dataset.count_values("frames.detections.detections.label"))

    dataset.untag_samples("test")
    frames.reload()

    # Reloading a view syncs it with the source dataset
    print(dataset.count_sample_tags())
    print(frames.count_sample_tags())
def test_to_clip_frames():
    """Interactively exercise frame views built on top of clip views
    (``to_clips(...).to_frames(fps=1)``).

    Prints the observable state after each operation so a human can verify
    which operations propagate through the clip view to the source dataset
    and which stay local to the frames view.
    """
    dataset = foz.load_zoo_dataset("quickstart-video").clone()
    dataset.compute_metadata()
    dataset.tag_samples("test")

    clips = dataset.filter_labels(
        "frames.detections", F("label") == "person"
    ).to_clips("frames.detections")

    frames = clips.to_frames(fps=1)

    print("\nClips view")
    for filepath, support in zip(*clips.values(["filepath", "support"])):
        print("%s: %s" % (filepath, support))

    print("\nFrames view")
    for filepath, fn in zip(*frames.values(["filepath", "frame_number"])):
        print("%s: %s" % (filepath, fn))

    # Frames inherit sample tags
    print(dataset.count_sample_tags())
    print(frames.count_sample_tags())

    frames.tag_samples("clips")

    # Tagging frames has no effect on the source dataset
    print(frames.count_sample_tags())
    print(dataset.count_sample_tags())

    frames.untag_samples("clips")

    # Untagging clips has no effect on the source dataset
    print(frames.count_sample_tags())
    print(dataset.count_sample_tags())

    frames.tag_labels("test")

    # Tagging labels applies to the source dataset
    print(frames.count_label_tags())
    print(dataset.count_label_tags())
    print(dataset.count_values("frames.detections.detections.tags"))

    frames.untag_labels("test")

    # Untagging labels applies to the source dataset
    print(frames.count_label_tags())
    print(dataset.count_label_tags())
    print(dataset.count_values("frames.detections.detections.tags"))

    view = frames.limit(10).set_field(
        "detections.detections.label", F("label").upper()
    )

    # Views can be created, but don't affect source dataset until saved
    print(frames.count("detections.detections"))
    print(view.count("detections.detections"))
    print(dataset.count("frames.detections.detections"))
    print(frames.count_values("detections.detections.label"))
    print(view.count_values("detections.detections.label"))
    print(dataset.count_values("frames.detections.detections.label"))

    view.save()

    # Changes and deletions are synced with source dataset
    print(frames.count("detections.detections"))
    print(view.count("detections.detections"))
    print(dataset.count("frames.detections.detections"))
    print(frames.count_values("detections.detections.label"))
    print(view.count_values("detections.detections.label"))
    print(dataset.count_values("frames.detections.detections.label"))

    # Ensure that data is correctly formed
    print(frames.first().id)
    print(dataset.first().frames.first().id)

    sample = frames.first()
    sample["hello"] = "world"
    sample.save()

    # Fields can be added by direct sample modification, and are saved
    print(frames)
    print(dataset)
    print(frames.count_values("hello"))
    print(dataset.count_values("frames.hello"))

    sample = frames.exclude_fields("detections").first()
    sample["hello2"] = "world2"
    sample.save()

    # Excluded fields are not removed when saving clip samples
    print(frames)
    print(dataset)
    print(frames.count_values("detections.detections.label"))
    print(frames.count_values("hello2"))
    print(dataset.count_values("frames.detections.detections.label"))
    print(dataset.count_values("frames.hello2"))

    dataset.untag_samples("test")
    frames.reload()

    # Reloading a view syncs it with the source dataset
    print(dataset.count_sample_tags())
    print(frames.count_sample_tags())  # empty because labels were capitalized!
def test_to_frame_patches():
    """Smoke-test patch views built on a frames view of a video dataset:
    tag propagation, edits through saved views, and reload syncing."""
    ds = foz.load_zoo_dataset("quickstart-video").clone()
    ds.tag_samples("test")
    frame_view = ds.to_frames()
    patch_view = frame_view.to_patches("detections")

    # Frames and patches inherit sample tags
    for coll in (ds, frame_view, patch_view):
        print(coll.count_sample_tags())

    patch_view.tag_samples("patches")
    # Tagging frame patches has no effect on the source datasets
    for coll in (ds, frame_view, patch_view):
        print(coll.count_sample_tags())

    patch_view.untag_samples("patches")
    # Untagging frames has no effect on the source datasets
    for coll in (ds, frame_view, patch_view):
        print(coll.count_sample_tags())

    patch_view.tag_labels("test")
    # Tagging patch labels applies to the source datasets
    print(patch_view.count_label_tags())
    print(patch_view.count_values("detections.tags"))
    print(frame_view.count_label_tags())
    print(frame_view.count_values("detections.detections.tags"))
    print(ds.count_label_tags())
    print(ds.count_values("frames.detections.detections.tags"))

    patch_view.untag_labels("test")
    # Untagging labels applies to the source datasets
    print(patch_view.count_label_tags())
    print(patch_view.count_values("detections.tags"))
    print(frame_view.count_label_tags())
    print(frame_view.count_values("detections.detections.tags"))
    print(ds.count_label_tags())
    print(ds.count_values("frames.detections.detections.tags"))

    capped = patch_view.limit(100).set_field("detections.label", F("label").upper())
    # Views can be created, but don't affect source datasets until saved
    print(capped.count("detections"))
    print(patch_view.count("detections"))
    print(frame_view.count("detections.detections"))
    print(ds.count("frames.detections.detections"))
    print(capped.count_values("detections.label"))
    print(patch_view.count_values("detections.label"))
    print(frame_view.count_values("detections.detections.label"))
    print(ds.count_values("frames.detections.detections.label"))

    capped.save()
    # Changes and deletions are synced with source datasets
    print(capped.count("detections"))
    print(patch_view.count("detections"))
    print(frame_view.count("detections.detections"))
    print(ds.count("frames.detections.detections"))
    print(capped.count_values("detections.label"))
    print(patch_view.count_values("detections.label"))
    print(frame_view.count_values("detections.detections.label"))
    print(ds.count_values("frames.detections.detections.label"))

    # Ensure that data is correctly formed
    print(capped.first().frame_id)
    print(patch_view.first().frame_id)
    print(frame_view.first().id)
    print(ds.first().frames.first().id)

    first_patch = capped.first()
    first_patch["hello"] = "world"
    first_patch.save()
    # New sample-level patch fields are not synced
    print(capped.count_values("hello"))
    print(patch_view.count_values("hello"))
    assert "hello" not in frame_view.get_field_schema()
    assert "hello" not in ds.get_frame_field_schema()

    first_patch.detections.hello = "world"
    first_patch.save()
    # Patch label changes are synced to source datasets
    print(capped.count_values("detections.hello"))
    print(patch_view.count_values("detections.hello"))
    print(frame_view.count_values("detections.detections.hello"))
    print(ds.count_values("frames.detections.detections.hello"))

    ds.untag_samples("test")
    patch_view.reload()
    # Reloading a frame patches view syncs it with the source datasets
    print(ds.count_sample_tags())
    print(frame_view.count_sample_tags())
    print(patch_view.count_sample_tags())
def test_to_frame_eval_patches():
    """Smoke-test evaluation-patch views built from a frames view."""
    ds = foz.load_zoo_dataset("quickstart-video").clone()
    ds.limit(1).save()

    # Fabricate "predictions" by jittering the ground-truth detections
    for smp in ds:
        for frm in smp.frames.values():
            preds = frm["detections"].copy()
            for det in preds.detections:
                det.confidence = random.random()
                det.bounding_box[0] += 0.03 * (random.random() - 0.5)
                det.bounding_box[1] += 0.03 * (random.random() - 0.5)
            frm["predictions"] = preds
        smp.save()

    ds.evaluate_detections(
        "frames.predictions", gt_field="frames.detections", eval_key="eval",
    )

    # Evaluation patches cannot be built directly on the video dataset
    try:
        patches = ds.to_evaluation_patches("eval")
        assert False  # shouldn't get here
    except ValueError:
        pass

    # ...but they can be built on a frames view of it
    patches = ds.to_frames().to_evaluation_patches("eval")
    print(patches)
    print(patches.first())
    print(patches.count_values("type"))
if __name__ == "__main__":
    # Show progress bars when the suite is executed directly
    fo.config.show_progress_bars = True
    unittest.main(verbosity=2)
| 32.557484
| 79
| 0.709774
| 1,862
| 15,009
| 5.577336
| 0.096133
| 0.078575
| 0.094945
| 0.052576
| 0.830814
| 0.813288
| 0.799326
| 0.794608
| 0.782667
| 0.773231
| 0
| 0.003974
| 0.161636
| 15,009
| 460
| 80
| 32.628261
| 0.821346
| 0.168566
| 0
| 0.693103
| 0
| 0
| 0.22495
| 0.151227
| 0
| 0
| 0
| 0
| 0.010345
| 1
| 0.017241
| false
| 0.003448
| 0.017241
| 0
| 0.034483
| 0.593103
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
3681605a8436017582d70af8f752aa9e3b5182ba
| 696
|
py
|
Python
|
src/train/features/__init__.py
|
ianstokesrees-gamma/demo-machine-learning
|
bc27995165e72ad829395e3a44102032b0d83c47
|
[
"MIT"
] | 3
|
2021-05-11T17:48:09.000Z
|
2022-01-24T17:58:34.000Z
|
src/train/features/__init__.py
|
ianstokesrees-gamma/demo-machine-learning
|
bc27995165e72ad829395e3a44102032b0d83c47
|
[
"MIT"
] | null | null | null |
src/train/features/__init__.py
|
ianstokesrees-gamma/demo-machine-learning
|
bc27995165e72ad829395e3a44102032b0d83c47
|
[
"MIT"
] | 4
|
2020-10-27T02:03:06.000Z
|
2022-01-24T17:59:20.000Z
|
from sklearn.pipeline import FeatureUnion
from .features import (
Speed,
NetClearance,
DistanceFromSideline,
Depth,
PlayerDistanceTravelled,
PlayerImpactDepth,
PreviousDistanceFromSideline,
PreviousTimeToNet,
Hitpoint,
Out,
WeirdNetClearance,
DistanceTravelledRatio
)
# All feature transformer classes exposed by this package, in a fixed order.
FEATURES_LIST = [
    Speed,
    NetClearance,
    DistanceFromSideline,
    Depth,
    PlayerDistanceTravelled,
    PlayerImpactDepth,
    PreviousDistanceFromSideline,
    PreviousTimeToNet,
    Hitpoint,
    Out,
    WeirdNetClearance,
    DistanceTravelledRatio,
]

# One (name, instance) pair per feature class, keyed by its declared name.
FEATURES_STORE = [(feature.name(), feature()) for feature in FEATURES_LIST]

# Combined transformer that applies every feature side by side.
features_generator = FeatureUnion(FEATURES_STORE)
| 19.333333
| 57
| 0.725575
| 49
| 696
| 10.204082
| 0.510204
| 0.068
| 0.148
| 0.168
| 0.74
| 0.74
| 0.74
| 0.74
| 0.74
| 0.74
| 0
| 0
| 0.212644
| 696
| 35
| 58
| 19.885714
| 0.912409
| 0
| 0
| 0.774194
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.064516
| 0
| 0.064516
| 0
| 0
| 0
| 1
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3689b82dd63df2d15f7d8e9d9e1c502fb9b44733
| 2,862
|
py
|
Python
|
tests/test_day_of_nth_weekday.py
|
pschoenfelder/named_dates
|
21eadcf7fbbe7f618a37580be668c1600554fbd8
|
[
"MIT"
] | null | null | null |
tests/test_day_of_nth_weekday.py
|
pschoenfelder/named_dates
|
21eadcf7fbbe7f618a37580be668c1600554fbd8
|
[
"MIT"
] | null | null | null |
tests/test_day_of_nth_weekday.py
|
pschoenfelder/named_dates
|
21eadcf7fbbe7f618a37580be668c1600554fbd8
|
[
"MIT"
] | null | null | null |
import pytest
from named_dates.named_dates import\
day_of_nth_weekday, NoNthWeekdayError
# For reference throughout these tests, October 1, 2015 is
# a Thursday (weekday = 3).
def test_weekday_equals_first_of_month():
    """day_of_nth_weekday when the requested weekday falls on the 1st.

    October 1, 2015 is a Thursday (weekday = 3), so the nth Thursday is
    simply 1 + 7*(nth-1).
    """
    expected = {1: 1, 2: 8, 3: 15, 4: 22, 5: 29}
    for nth, day in expected.items():
        assert day_of_nth_weekday(2015, 10, 3, nth=nth) == day
    # nth=0 and nth=6 are out of range for this month
    for bad_nth in (0, 6):
        with pytest.raises(NoNthWeekdayError):
            day_of_nth_weekday(2015, 10, 3, nth=bad_nth)
def test_weekday_greater_than_first_of_month():
    """day_of_nth_weekday when the requested weekday is greater than the
    weekday of the 1st of the month (October 2015 starts on weekday 3)."""
    expected = {1: 3, 2: 10, 5: 31}
    for nth, day in expected.items():
        assert day_of_nth_weekday(2015, 10, 5, nth=nth) == day
    with pytest.raises(NoNthWeekdayError):
        day_of_nth_weekday(2015, 10, 5, nth=6)
def test_weekday_less_than_first_of_month():
    """day_of_nth_weekday when the requested weekday is less than the
    weekday of the 1st of the month (October 2015 starts on weekday 3)."""
    expected = {1: 6, 2: 13, 3: 20, 4: 27}
    for nth, day in expected.items():
        assert day_of_nth_weekday(2015, 10, 1, nth=nth) == day
    # There are only four Tuesdays in October 2015
    with pytest.raises(NoNthWeekdayError):
        day_of_nth_weekday(2015, 10, 1, nth=5)
def test_from_end():
    """day_of_nth_weekday with from_end=True counts weekdays backwards
    from the last day of the month.

    October 31, 2015 is a Saturday (weekday 5); the month contains five
    Saturdays, five Thursdays (weekday 3) and four Sundays (weekday 6).
    """
    # {weekday: ({nth: expected_day_of_month}, first_out_of_range_nth)}
    cases = {
        5: ({1: 31, 2: 24, 5: 3}, 6),
        3: ({1: 29, 2: 22, 5: 1}, 6),
        6: ({1: 25, 2: 18, 4: 4}, 5),
    }
    for weekday, (expected, bad_nth) in cases.items():
        for nth, day in expected.items():
            assert (
                day_of_nth_weekday(2015, 10, weekday, nth=nth, from_end=True)
                == day
            )
        # Fixed: the original wrapped these calls in a redundant `assert`;
        # inside pytest.raises the raising call itself is the check.
        with pytest.raises(NoNthWeekdayError):
            day_of_nth_weekday(2015, 10, weekday, nth=bad_nth, from_end=True)
def test_bad_kwargs_disallowed():
    """Unknown keyword arguments must raise TypeError."""
    with pytest.raises(TypeError):
        day_of_nth_weekday(2015, 1, 1, bad_kwarg=1)
| 42.088235
| 75
| 0.70615
| 498
| 2,862
| 3.785141
| 0.126506
| 0.087533
| 0.140053
| 0.262599
| 0.815915
| 0.73687
| 0.73687
| 0.73687
| 0.734748
| 0.604775
| 0
| 0.117975
| 0.185535
| 2,862
| 67
| 76
| 42.716418
| 0.690691
| 0.150943
| 0
| 0.155556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.533333
| 1
| 0.111111
| true
| 0
| 0.044444
| 0
| 0.155556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
36f092d885e27227a8bfe39d09fd773dbb93a634
| 99
|
py
|
Python
|
calulator.py
|
Yolo2050/test-github
|
eb31d836e56d4f43b98f85e8ee22da180815b179
|
[
"MIT"
] | null | null | null |
calulator.py
|
Yolo2050/test-github
|
eb31d836e56d4f43b98f85e8ee22da180815b179
|
[
"MIT"
] | null | null | null |
calulator.py
|
Yolo2050/test-github
|
eb31d836e56d4f43b98f85e8ee22da180815b179
|
[
"MIT"
] | null | null | null |
def calcu():
    """Placeholder calculator entry point; intentionally does nothing."""
def calc_add(a, b):
    """Return the sum of *a* and *b* (delegates to the ``+`` operator)."""
    return a + b
def calc_minus(a, b):
    """Return *a* minus *b* (delegates to the ``-`` operator)."""
    return a - b
| 9.9
| 20
| 0.565657
| 19
| 99
| 2.842105
| 0.473684
| 0.148148
| 0.296296
| 0.333333
| 0.37037
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.30303
| 99
| 9
| 21
| 11
| 0.782609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0.166667
| 0
| 0.333333
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
|
0
| 7
|
3d35f28ad23c17f78a5d221ae46c13beb3b755ab
| 7,848
|
py
|
Python
|
wntr/tests/test_multiple_simulations.py
|
algchyhao/WNTR
|
dd4db188a8641a4da16cf80a1557c908fa48c17d
|
[
"BSD-3-Clause"
] | null | null | null |
wntr/tests/test_multiple_simulations.py
|
algchyhao/WNTR
|
dd4db188a8641a4da16cf80a1557c908fa48c17d
|
[
"BSD-3-Clause"
] | null | null | null |
wntr/tests/test_multiple_simulations.py
|
algchyhao/WNTR
|
dd4db188a8641a4da16cf80a1557c908fa48c17d
|
[
"BSD-3-Clause"
] | null | null | null |
# These tests run a demand-driven simulation with both WNTR and Epanet and compare the results for the example networks
import unittest
from os.path import abspath, dirname, join
import pandas as pd
import pickle
testdir = dirname(abspath(str(__file__)))
test_datadir = join(testdir,'networks_for_testing')
ex_datadir = join(testdir,'..','..','examples','networks')
class TestResetInitialValues(unittest.TestCase):
    """Verify that reset_initial_values() lets the same WNTRSimulator
    reproduce an identical 24-hour simulation on the same network model.

    Fixed: classmethod first argument renamed ``self`` -> ``cls`` (PEP 8).
    """

    @classmethod
    def setUpClass(cls):
        import wntr

        cls.wntr = wntr
        inp_file = join(ex_datadir, 'Net3.inp')
        cls.wn = cls.wntr.network.WaterNetworkModel(inp_file)
        cls.wn.options.time.hydraulic_timestep = 3600
        cls.wn.options.time.duration = 24 * 3600
        sim = cls.wntr.sim.WNTRSimulator(cls.wn)
        cls.res1 = sim.run_sim(solver_options={'TOL': 1e-8})
        # After resetting initial values a second run must match the first
        cls.wn.reset_initial_values()
        cls.res2 = sim.run_sim(solver_options={'TOL': 1e-8})

    @classmethod
    def tearDownClass(cls):
        pass

    def test_link_flowrate(self):
        # Both runs must agree to 7 decimal places at every time step
        for link_name, link in self.wn.links():
            for t in self.res1.time:
                self.assertAlmostEqual(
                    self.res1.link['flowrate'].at[t, link_name],
                    self.res2.link['flowrate'].at[t, link_name], 7)

    def test_link_velocity(self):
        for link_name, link in self.wn.links():
            for t in self.res1.link['velocity'].index:
                self.assertAlmostEqual(
                    self.res1.link['velocity'].at[t, link_name],
                    self.res2.link['velocity'].at[t, link_name], 7)

    def test_node_demand(self):
        for node_name, node in self.wn.nodes():
            for t in self.res1.node['demand'].index:
                self.assertAlmostEqual(
                    self.res1.node['demand'].at[t, node_name],
                    self.res2.node['demand'].at[t, node_name], 7)

    def test_node_head(self):
        for node_name, node in self.wn.nodes():
            for t in self.res1.node['head'].index:
                self.assertAlmostEqual(
                    self.res1.node['head'].at[t, node_name],
                    self.res2.node['head'].at[t, node_name], 7)

    def test_node_pressure(self):
        for node_name, node in self.wn.nodes():
            for t in self.res1.node['pressure'].index:
                self.assertAlmostEqual(
                    self.res1.node['pressure'].at[t, node_name],
                    self.res2.node['pressure'].at[t, node_name], 7)
class TestStopStartSim(unittest.TestCase):
    """Check that a simulation stopped at 10 h and restarted to 24 h
    matches a single continuous 24 h simulation.

    Fixed: classmethod first argument renamed ``self`` -> ``cls`` (PEP 8).
    """

    @classmethod
    def setUpClass(cls):
        import wntr

        cls.wntr = wntr
        inp_file = join(ex_datadir, 'Net3.inp')

        # Reference: one continuous 24-hour run
        parser = cls.wntr.epanet.InpFile()
        cls.wn = parser.read(inp_file)
        cls.wn.options.time.hydraulic_timestep = 3600
        cls.wn.options.time.duration = 24 * 3600
        sim = cls.wntr.sim.WNTRSimulator(cls.wn)
        cls.res1 = sim.run_sim(solver_options={'TOL': 1e-8})

        # Stop/start: run 10 hours, then extend the same model to 24 hours
        parser = cls.wntr.epanet.InpFile()
        cls.wn = parser.read(inp_file)
        cls.wn.options.time.hydraulic_timestep = 3600
        cls.wn.options.time.duration = 10 * 3600
        sim = cls.wntr.sim.WNTRSimulator(cls.wn)
        cls.res2 = sim.run_sim(solver_options={'TOL': 1e-8})
        cls.wn.options.time.duration = 24 * 3600
        cls.res3 = sim.run_sim(solver_options={'TOL': 1e-8})

        # Stitch the two partial runs together so they compare one-to-one
        # against the continuous run
        node_res = {}
        link_res = {}
        for key in cls.res2.node.keys():
            node_res[key] = pd.concat(
                [cls.res2.node[key], cls.res3.node[key]], axis=0)
        for key in cls.res2.link.keys():
            link_res[key] = pd.concat(
                [cls.res2.link[key], cls.res3.link[key]], axis=0)
        cls.res2.node = node_res
        cls.res2.link = link_res

    @classmethod
    def tearDownClass(cls):
        pass

    def test_link_flowrate(self):
        for link_name, link in self.wn.links():
            for t in self.res1.link['flowrate'].index:
                self.assertAlmostEqual(
                    self.res1.link['flowrate'].at[t, link_name],
                    self.res2.link['flowrate'].at[t, link_name], 7)

    def test_link_velocity(self):
        for link_name, link in self.wn.links():
            for t in self.res1.link['velocity'].index:
                self.assertAlmostEqual(
                    self.res1.link['velocity'].at[t, link_name],
                    self.res2.link['velocity'].at[t, link_name], 7)

    def test_node_demand(self):
        for node_name, node in self.wn.nodes():
            for t in self.res1.node['demand'].index:
                self.assertAlmostEqual(
                    self.res1.node['demand'].at[t, node_name],
                    self.res2.node['demand'].at[t, node_name], 7)

    def test_node_head(self):
        for node_name, node in self.wn.nodes():
            for t in self.res1.node['head'].index:
                self.assertAlmostEqual(
                    self.res1.node['head'].at[t, node_name],
                    self.res2.node['head'].at[t, node_name], 7)

    def test_node_pressure(self):
        for node_name, node in self.wn.nodes():
            for t in self.res1.node['pressure'].index:
                self.assertAlmostEqual(
                    self.res1.node['pressure'].at[t, node_name],
                    self.res2.node['pressure'].at[t, node_name], 7)
class TestPickle(unittest.TestCase):
    """Check that a network model pickled mid-simulation and reloaded
    continues to the same results as an uninterrupted run.

    Fixed: classmethod first argument renamed ``self`` -> ``cls`` (PEP 8);
    raw open()/close() pairs replaced with ``with`` context managers so the
    file handle is closed even if dump/load raises.
    """

    @classmethod
    def setUpClass(cls):
        import wntr

        cls.wntr = wntr
        inp_file = join(ex_datadir, 'Net3.inp')

        # Reference: one continuous 24-hour run
        parser = cls.wntr.epanet.InpFile()
        cls.wn = parser.read(inp_file)
        cls.wn.options.time.hydraulic_timestep = 3600
        cls.wn.options.time.duration = 24 * 3600
        sim = cls.wntr.sim.WNTRSimulator(cls.wn)
        cls.res1 = sim.run_sim(solver_options={'TOL': 1e-8})

        # Run 10 hours, pickle the model, reload it, and finish to 24 hours
        parser = cls.wntr.epanet.InpFile()
        cls.wn = parser.read(inp_file)
        cls.wn.options.time.hydraulic_timestep = 3600
        cls.wn.options.time.duration = 10 * 3600
        sim = cls.wntr.sim.WNTRSimulator(cls.wn)
        cls.res2 = sim.run_sim(solver_options={'TOL': 1e-8})
        with open('temp.pickle', 'wb') as f:
            pickle.dump(cls.wn, f)
        with open('temp.pickle', 'rb') as f:
            wn2 = pickle.load(f)
        wn2.options.time.duration = 24 * 3600
        sim = cls.wntr.sim.WNTRSimulator(wn2)
        cls.res3 = sim.run_sim(solver_options={'TOL': 1e-8})

        # Stitch the partial runs for one-to-one comparison
        node_res = {}
        link_res = {}
        for key in cls.res2.node.keys():
            node_res[key] = pd.concat(
                [cls.res2.node[key], cls.res3.node[key]], axis=0)
        for key in cls.res2.link.keys():
            link_res[key] = pd.concat(
                [cls.res2.link[key], cls.res3.link[key]], axis=0)
        cls.res2.node = node_res
        cls.res2.link = link_res

    @classmethod
    def tearDownClass(cls):
        pass

    def test_link_flowrate(self):
        for link_name, link in self.wn.links():
            for t in self.res1.link['flowrate'].index:
                self.assertAlmostEqual(
                    self.res1.link['flowrate'].at[t, link_name],
                    self.res2.link['flowrate'].at[t, link_name], 7)

    def test_link_velocity(self):
        for link_name, link in self.wn.links():
            for t in self.res1.link['velocity'].index:
                self.assertAlmostEqual(
                    self.res1.link['velocity'].at[t, link_name],
                    self.res2.link['velocity'].at[t, link_name], 7)

    def test_node_demand(self):
        for node_name, node in self.wn.nodes():
            for t in self.res1.node['demand'].index:
                self.assertAlmostEqual(
                    self.res1.node['demand'].at[t, node_name],
                    self.res2.node['demand'].at[t, node_name], 7)

    def test_node_head(self):
        for node_name, node in self.wn.nodes():
            for t in self.res1.node['head'].index:
                self.assertAlmostEqual(
                    self.res1.node['head'].at[t, node_name],
                    self.res2.node['head'].at[t, node_name], 7)

    def test_node_pressure(self):
        for node_name, node in self.wn.nodes():
            for t in self.res1.node['pressure'].index:
                self.assertAlmostEqual(
                    self.res1.node['pressure'].at[t, node_name],
                    self.res2.node['pressure'].at[t, node_name], 7)
if __name__ == '__main__':
    # Run the full test suite when executed as a script
    unittest.main()
| 40.040816
| 129
| 0.628695
| 1,131
| 7,848
| 4.241379
| 0.096375
| 0.04753
| 0.045028
| 0.041276
| 0.896602
| 0.896602
| 0.896602
| 0.89139
| 0.89139
| 0.89139
| 0
| 0.02828
| 0.225025
| 7,848
| 195
| 130
| 40.246154
| 0.760441
| 0.014908
| 0
| 0.863636
| 0
| 0
| 0.054341
| 0
| 0
| 0
| 0
| 0
| 0.097403
| 1
| 0.136364
| false
| 0.019481
| 0.045455
| 0
| 0.201299
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e9e52854b622fafefe3a77f55d5ee942baab76e6
| 6,606
|
py
|
Python
|
mpesa_api/core/models.py
|
raccoongang/mpesa_api
|
38e90a63b91df8f90079cf946864540c5d7610e0
|
[
"MIT"
] | 1
|
2019-10-08T12:36:55.000Z
|
2019-10-08T12:36:55.000Z
|
mpesa_api/core/models.py
|
raccoongang/mpesa_api
|
38e90a63b91df8f90079cf946864540c5d7610e0
|
[
"MIT"
] | 3
|
2020-02-12T02:53:13.000Z
|
2021-06-10T22:18:28.000Z
|
mpesa_api/core/models.py
|
eugenewere/mpesa
|
9f4b7d7381b54578a76a163ba764157476611e94
|
[
"MIT"
] | 2
|
2020-11-04T08:10:19.000Z
|
2020-11-06T08:32:54.000Z
|
from django.db import models
from mpesa_api.util.managers import AuthTokenManager
class AuthToken(models.Model):
    """Persisted API auth token: token string, token type and expires_in.

    Fixed: stray trailing semicolon removed from the access_token field.
    """

    access_token = models.CharField(max_length=40)
    # NOTE: 'type' shadows the builtin, but renaming it would change the
    # model's public field / DB column, so it is kept as-is.
    type = models.CharField(max_length=3)
    expires_in = models.BigIntegerField()

    objects = AuthTokenManager()

    def __str__(self):
        return self.access_token

    class Meta:
        db_table = 'tbl_access_token'
class B2CRequest(models.Model):
    """Outgoing B2C request record: target phone, amount, and the
    conversation/response/error fields returned by the remote API."""

    id = models.BigAutoField(primary_key=True)
    phone = models.BigIntegerField()
    amount = models.DecimalField(max_digits=20, decimal_places=2)
    conversation_id = models.CharField(max_length=40, blank=True, null=True)
    originator_conversation_id = models.CharField(max_length=40, blank=True, null=True)
    response_code = models.CharField(max_length=5, blank=True, null=True)
    response_description = models.TextField(blank=True, null=True)
    request_id = models.CharField(max_length=20, blank=True, null=True)
    error_code = models.CharField(max_length=20, blank=True, null=True)
    error_message = models.TextField(blank=True, null=True)
    date_added = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return str(self.phone)

    class Meta:
        db_table = 'tbl_b2c_requests'
        verbose_name_plural = 'B2C Requests'
class B2CResponse(models.Model):
    """B2C response record: result codes, transaction identifiers and the
    fund balances reported back by the remote API."""

    id = models.BigAutoField(primary_key=True)
    phone = models.BigIntegerField(blank=True, null=True)
    amount = models.DecimalField(max_digits=20, decimal_places=2, blank=True, null=True)
    conversation_id = models.CharField(max_length=40, blank=True, null=True)
    originator_conversation_id = models.CharField(max_length=40, blank=True, null=True)
    result_type = models.CharField(max_length=5, blank=True, null=True)
    result_code = models.CharField(max_length=5, blank=True, null=True)
    result_description = models.TextField(blank=True, null=True)
    transaction_id = models.CharField(max_length=20, blank=True, null=True)
    transaction_receipt = models.CharField(max_length=20, blank=True, null=True)
    transaction_amount = models.DecimalField(max_digits=20, decimal_places=2, blank=True, null=True)
    working_funds = models.DecimalField(max_digits=20, decimal_places=2, blank=True, null=True)
    utility_funds = models.DecimalField(max_digits=20, decimal_places=2, blank=True, null=True)
    paid_account_funds = models.DecimalField(max_digits=20, decimal_places=2, blank=True, null=True)
    transaction_date = models.DateTimeField(blank=True, null=True)
    mpesa_user_name = models.CharField(max_length=100, blank=True, null=True)
    is_registered_customer = models.CharField(max_length=1, blank=True, null=True)

    def __str__(self):
        return str(self.phone)

    class Meta:
        db_table = 'tbl_b2c_response'
        verbose_name_plural = 'B2C Responses'
class C2BRequest(models.Model):
    """Incoming C2B transaction record: transaction identifiers, amounts,
    payer details and validation/completion flags.

    Fixed: ``__str__`` duplicated the body of the ``name`` property; it now
    delegates to it so the formatting lives in one place.
    """

    id = models.BigAutoField(primary_key=True)
    transaction_type = models.CharField(max_length=20, blank=True, null=True)
    transaction_id = models.CharField(max_length=20, unique=True)
    transaction_date = models.DateTimeField(blank=True, null=True)
    amount = models.DecimalField(max_digits=20, decimal_places=2, blank=True, null=True)
    business_short_code = models.CharField(max_length=20, blank=True, null=True)
    bill_ref_number = models.CharField(max_length=50, blank=True, null=True)
    invoice_number = models.CharField(max_length=50, blank=True, null=True)
    org_account_balance = models.DecimalField(max_digits=20, decimal_places=2,
                                              blank=True, null=True, default=0.0)
    third_party_trans_id = models.CharField(max_length=50, blank=True, null=True)
    phone = models.BigIntegerField(blank=True, null=True)
    first_name = models.CharField(max_length=50, blank=True, null=True)
    middle_name = models.CharField(max_length=50, blank=True, null=True)
    last_name = models.CharField(max_length=50, blank=True, null=True)
    is_validated = models.BooleanField(default=False)
    is_completed = models.BooleanField(default=False)
    date_added = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return self.name

    class Meta:
        db_table = 'tbl_c2b_requests'
        verbose_name_plural = 'C2B Requests'

    @property
    def name(self):
        # Space-joined full name; parts may be None and render as 'None'
        return '{} {} {}'.format(self.first_name, self.middle_name, self.last_name)
class OnlineCheckout(models.Model):
    """Online checkout request record: phone, amount, checkout identifiers
    and the response fields returned for the request."""

    id = models.BigAutoField(primary_key=True)
    phone = models.BigIntegerField()
    amount = models.DecimalField(max_digits=20, decimal_places=2)
    checkout_request_id = models.CharField(max_length=50, default='')
    account_reference = models.CharField(max_length=50, default='')
    transaction_description = models.CharField(max_length=50, blank=True, null=True)
    customer_message = models.CharField(max_length=100, blank=True, null=True)
    merchant_request_id = models.CharField(max_length=50, blank=True, null=True)
    response_code = models.CharField(max_length=5, blank=True, null=True)
    response_description = models.CharField(max_length=100, blank=True, null=True)
    date_added = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return str(self.phone)

    class Meta:
        db_table = 'tbl_online_checkout_requests'
        verbose_name_plural = 'Online Checkout Requests'
class OnlineCheckoutResponse(models.Model):
    """Online checkout response record: result codes, receipt number and
    the transaction details reported for a checkout request."""

    id = models.BigAutoField(primary_key=True)
    merchant_request_id = models.CharField(max_length=50, blank=True, null=True)
    checkout_request_id = models.CharField(max_length=50, default='')
    result_code = models.CharField(max_length=5, blank=True, null=True)
    result_description = models.CharField(max_length=100, blank=True, null=True)
    mpesa_receipt_number = models.CharField(max_length=50, blank=True, null=True)
    transaction_date = models.DateTimeField(blank=True, null=True)
    phone = models.BigIntegerField(blank=True, null=True)
    amount = models.DecimalField(max_digits=20, decimal_places=2, blank=True, null=True)
    date_added = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return str(self.phone)

    class Meta:
        db_table = 'tbl_online_checkout_responses'
        verbose_name_plural = 'Online Checkout Responses'
| 42.896104
| 100
| 0.730245
| 865
| 6,606
| 5.346821
| 0.137572
| 0.091459
| 0.132108
| 0.172757
| 0.814486
| 0.759135
| 0.747243
| 0.718919
| 0.718919
| 0.655135
| 0
| 0.020375
| 0.16046
| 6,606
| 153
| 101
| 43.176471
| 0.813559
| 0.02089
| 0
| 0.468468
| 0
| 0
| 0.034915
| 0.008924
| 0
| 0
| 0
| 0
| 0
| 1
| 0.063063
| false
| 0
| 0.018018
| 0.063063
| 0.882883
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
1816a610504a45a4fb7a6e82fa5216f9a770f3b5
| 12,645
|
py
|
Python
|
train.py
|
amysudarat/affective-monitor-model
|
4de08704a3d36a0a228d5eeb9a33317be51f18fc
|
[
"MIT"
] | null | null | null |
train.py
|
amysudarat/affective-monitor-model
|
4de08704a3d36a0a228d5eeb9a33317be51f18fc
|
[
"MIT"
] | null | null | null |
train.py
|
amysudarat/affective-monitor-model
|
4de08704a3d36a0a228d5eeb9a33317be51f18fc
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
contains main loop for training
"""
import torch
import utils
import matplotlib.pyplot as plt
import numpy as np
import torch.nn as nn
from torch.utils.data.sampler import SubsetRandomSampler
from model.dataset_class import AffectiveMonitorDataset
from model.net_valence import myLSTM_valence
from model.net_arousal import myLSTM_arousal
def train_valence(pickle_file="data_1_50_toTensor.pkl", learning_rate=0.03):
    """Train the valence LSTM classifier on a pickled FAP dataset.

    Args:
        pickle_file: path to the pickled dataset loaded via
            ``utils.load_object``; items are dicts with 'FAP' and
            'Valence' entries (presumably tensors -- verify against the
            dataset class).
        learning_rate: SGD learning rate.

    Side effects: prints loss/accuracy every 100 iterations and shows a
    matplotlib loss curve when training finishes.
    """
    face_dataset = utils.load_object(pickle_file)

    # ---- train/validation split (fixed seed for reproducibility) ----
    validation_split = 0.2
    random_seed = 42
    shuffle_dataset = True
    dataset_size = len(face_dataset)
    indices = list(range(dataset_size))
    split = int(np.floor(validation_split * dataset_size))
    if shuffle_dataset:
        np.random.seed(random_seed)
        np.random.shuffle(indices)
    train_indices, val_indices = indices[split:], indices[:split]
    train_sampler = SubsetRandomSampler(train_indices)
    test_sampler = SubsetRandomSampler(val_indices)

    # ---- data loaders ----
    batch_size = 100
    n_iters = 1000
    train_loader = torch.utils.data.DataLoader(
        face_dataset, batch_size=batch_size, sampler=train_sampler)
    test_loader = torch.utils.data.DataLoader(
        face_dataset, batch_size=batch_size, sampler=test_sampler)

    # ---- model dimensions ----
    # 100 time steps per sequence, 19 FAP features per step, 5 classes
    input_dim = 19
    hidden_dim = 100
    layer_dim = 1
    output_dim = 5
    seq_dim = 100  # number of steps to unroll
    num_epochs = int(n_iters / (len(train_sampler) / batch_size))

    model = myLSTM_valence(input_dim, hidden_dim, layer_dim, output_dim)
    if torch.cuda.is_available():
        model = model.cuda()

    criterion = nn.CrossEntropyLoss()
    optimizer = torch.optim.SGD(model.parameters(), lr=learning_rate)

    # ---- training loop ----
    iteration = 0
    iter_num = []
    loss_list = []
    for epoch in range(num_epochs):
        for i, data in enumerate(train_loader):
            FAPs = data['FAP']
            labels = data['Valence']
            labels = labels.long()  # CrossEntropyLoss expects int64 targets
            FAPs = FAPs.float()     # model weights default to float32
            if torch.cuda.is_available():
                FAPs = FAPs.view(-1, seq_dim, input_dim).cuda()
                labels = labels.cuda()
            else:
                FAPs = FAPs.view(-1, seq_dim, input_dim)
            FAPs.requires_grad = True

            optimizer.zero_grad()
            outputs = model(FAPs)              # logits, one row per sequence
            loss = criterion(outputs, labels)  # softmax + cross entropy
            loss.backward()
            optimizer.step()
            iteration = iteration + 1

            # Evaluate on the held-out split every 100 iterations
            if iteration % 100 == 0:
                correct = 0
                total = 0
                for i, data in enumerate(test_loader):
                    FAPs = data['FAP']
                    labels = data['Valence']
                    labels = labels.long()
                    FAPs = FAPs.float()
                    if torch.cuda.is_available():
                        FAPs = FAPs.view(-1, seq_dim, input_dim).cuda()
                        labels = labels.cuda()
                    else:
                        FAPs = FAPs.view(-1, seq_dim, input_dim)
                    FAPs.requires_grad = True
                    outputs = model(FAPs)
                    # Predicted class = argmax over the logits
                    _, predicted = torch.max(outputs.data, 1)
                    total = total + labels.size(0)
                    if torch.cuda.is_available():
                        correct = correct + (predicted.cpu() == labels.cpu()).sum()
                    else:
                        correct = correct + (predicted == labels).sum()
                accuracy = 100 * (correct.item() / total)
                iter_num.append(iteration)
                loss_list.append(loss.item())
                print("Iteration: {}. Loss: {}. Accuracy: {}".format(
                    iteration, loss.item(), accuracy))

    # ---- loss curve ----
    plt.plot(iter_num, loss_list)
    plt.xlabel("Number of Iterations")
    plt.ylabel("Loss")
    plt.show()
def train_arousal(pickle_file="data_1_4_toTensor.pkl",learning_rate=0.01):
    """Train the LSTM arousal classifier on pupil-diameter (PD) sequences.

    Args:
        pickle_file: path of a pickled dataset loadable via utils.load_object.
            Each item must provide 'PD' (length-100 scalar sequence) and
            'Arousal' (integer class label in [0, 5)).
            # assumes that schema — TODO confirm against AffectiveMonitorDataset
        learning_rate: SGD learning rate.

    Side effects: prints loss/accuracy every 100 iterations and shows a
    matplotlib figure with the loss and accuracy curves.
    """
    # Load pre-pickled dataset.
    face_dataset = utils.load_object(pickle_file)
    # Split into train/validation indices (20% held out, fixed seed shuffle).
    validation_split = 0.2
    random_seed = 42
    shuffle_dataset = True
    dataset_size = len(face_dataset)
    indices = list(range(dataset_size))
    split = int(np.floor(validation_split*dataset_size))
    if shuffle_dataset:
        np.random.seed(random_seed)
        np.random.shuffle(indices)
    train_indices, val_indices = indices[split:], indices[:split]
    train_sampler = SubsetRandomSampler(train_indices)
    test_sampler = SubsetRandomSampler(val_indices)
    # Make dataset iterable.
    batch_size = 100
    n_iters = 350*3
    train_loader = torch.utils.data.DataLoader(face_dataset,
                                               batch_size=batch_size,
                                               sampler=train_sampler)
    test_loader = torch.utils.data.DataLoader(face_dataset,
                                              batch_size=batch_size,
                                              sampler=test_sampler)
    # Model dimensions: 100 time steps of a scalar PD value,
    # 1 LSTM layer of width 100, 5 output classes.
    input_dim = 1
    hidden_dim = 100
    layer_dim = 1
    output_dim = 5
    # Number of steps to unroll.
    seq_dim = 100
    num_epochs = int(n_iters / (len(train_sampler)/batch_size))
    # Instantiate model, loss and optimizer.
    model = myLSTM_arousal(input_dim,hidden_dim,layer_dim,output_dim)
    use_cuda = torch.cuda.is_available()   # hoisted: checked once, not per batch
    if use_cuda:
        model.cuda()
    criterion = nn.CrossEntropyLoss()
    optimizer = torch.optim.SGD(model.parameters(),lr = learning_rate)
    # Training loop.
    iteration = 0
    iter_num = []
    loss_list = []
    accuracy_list = []
    for epoch in range(num_epochs):
        for i, data in enumerate(train_loader):
            # Cast input to float (model weights are float by default) and
            # reshape to (batch, seq, feature); labels to long for CE loss.
            PDs = data['PD'].float().view(-1,seq_dim,input_dim)
            labels = data['Arousal'].long()
            if use_cuda:
                PDs = PDs.cuda()
                labels = labels.cuda()
            # Clear gradients w.r.t. parameters.
            optimizer.zero_grad()
            # Forward pass to get logits, then softmax + cross-entropy loss.
            outputs = model(PDs)
            loss = criterion(outputs,labels)
            # Backprop and parameter update.
            loss.backward()
            optimizer.step()
            iteration = iteration+1
            # Evaluate accuracy on the held-out set every 100 iterations.
            if iteration%100 == 0:
                correct = 0
                total = 0
                # FIX: run evaluation in eval mode and without autograd.
                # The original set requires_grad=True on eval inputs and
                # built gradient graphs for every eval batch (wasted memory).
                model.eval()
                with torch.no_grad():
                    for eval_data in test_loader:   # FIX: no longer shadows train index i
                        eval_PDs = eval_data['PD'].float().view(-1,seq_dim,input_dim)
                        eval_labels = eval_data['Arousal'].long()
                        if use_cuda:
                            eval_PDs = eval_PDs.cuda()
                            eval_labels = eval_labels.cuda()
                        eval_outputs = model(eval_PDs)
                        # Predicted class = argmax over logits.
                        _, predicted = torch.max(eval_outputs,1)
                        total = total + eval_labels.size(0)
                        # Same-device comparison; no .cpu() round-trip needed.
                        correct = correct + (predicted == eval_labels).sum().item()
                model.train()
                accuracy = 100 * (correct/total)
                iter_num.append(iteration)
                loss_list.append(loss.item())
                accuracy_list.append(accuracy)
                print("Iteration: {}. Loss: {}. Accuracy: {}".format(iteration,loss.item(),accuracy))
    # Plot loss and learning curves, sharing the iteration axis.
    fig, (ax_loss, ax_lc) = plt.subplots(nrows=2,ncols=1,sharex=True)
    ax_loss.plot(iter_num,loss_list)
    ax_lc.plot(iter_num,accuracy_list)
    ax_loss.grid(True)
    ax_lc.grid(True)
    ax_lc.set_xlabel("Number of Iterations")
    ax_loss.set_ylabel("Loss")
    ax_lc.set_ylabel("Learning curve")
    fig.suptitle("learning rate: "+str(learning_rate))
    plt.show()
if __name__ == "__main__":
    # Script entry point: train the arousal model on the 50-subject pickle.
    # (Valence training kept for reference, disabled.)
    # train_valence(pickle_file="data_1_50_toTensor.pkl",learning_rate=0.01)
    train_arousal(pickle_file="data_1_50_toTensor.pkl",learning_rate=0.03)
| 35.720339
| 114
| 0.529142
| 1,309
| 12,645
| 4.954163
| 0.161192
| 0.023747
| 0.01357
| 0.016037
| 0.88219
| 0.87633
| 0.858443
| 0.850424
| 0.839938
| 0.839938
| 0
| 0.018931
| 0.385923
| 12,645
| 353
| 115
| 35.82153
| 0.816227
| 0.238039
| 0
| 0.810811
| 0
| 0
| 0.027469
| 0.006815
| 0
| 0
| 0
| 0
| 0
| 1
| 0.010811
| false
| 0
| 0.048649
| 0
| 0.059459
| 0.010811
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a15bd1bdc48f1ca1772690bb636f372c35794080
| 27,846
|
py
|
Python
|
Pyfiles/circuits.py
|
rickyHong/Quantum_Machine_Learning_Express
|
ba5f57b3544b1c73b49eb251800459fc2394df2f
|
[
"MIT"
] | 14
|
2021-03-04T22:55:24.000Z
|
2022-03-31T12:11:35.000Z
|
Pyfiles/circuits.py
|
rickyHong/Quantum_Machine_Learning_Express
|
ba5f57b3544b1c73b49eb251800459fc2394df2f
|
[
"MIT"
] | 15
|
2021-03-08T15:39:53.000Z
|
2021-08-19T18:10:12.000Z
|
Pyfiles/circuits.py
|
rickyHong/Quantum_Machine_Learning_Express
|
ba5f57b3544b1c73b49eb251800459fc2394df2f
|
[
"MIT"
] | 9
|
2021-06-10T23:26:53.000Z
|
2022-02-21T16:31:09.000Z
|
from qiskit import QuantumRegister, ClassicalRegister, QuantumCircuit
from math import pi
def circuit1(qc,qr,theta,L,repeat):
#circuit 1
#theta is list of the parameters
#theta length is 8L
#L is the number of repeatation
# repeat will conjugate the first part and add next the the circuit for expressibility
# 0:No, 1: Repeat
count=0
for l in range(L):
for i in range(4):
qc.rx(theta[count],qr[i])
count=count+1
for i in range(4):
qc.rz(theta[count],qr[i])
count=count+1
if repeat!=0:
qc.barrier(qr)
for l in range(L):
for i in range(4):
qc.rz(theta[count],qr[i])
count=count+1
for i in range(4):
qc.rx(theta[count],qr[i])
count=count+1
return qc
def circuit2(qc,qr,theta,L,repeat):
    """Append ansatz 'circuit 2' to qc and return it.

    Layer (x L): RX then RZ on each of 4 qubits, then CX chain 3->2,
    2->1, 1->0.  Consumes 8*L entries of theta (16*L when repeat != 0,
    which appends the mirrored gate order after a barrier with fresh
    parameters, for expressibility studies).
    """
    count=0
    for l in range(L):
        for i in range(4):
            qc.rx(theta[count],qr[i])
            count=count+1
        for i in range(4):
            qc.rz(theta[count],qr[i])
            count=count+1
        qc.cx(qr[3],qr[2])
        qc.cx(qr[2],qr[1])
        qc.cx(qr[1],qr[0])
    if repeat!=0:
        qc.barrier(qr)
        # Mirror: CX chain reversed, then RZ, then RX layers.
        for l in range(L):
            qc.cx(qr[1],qr[0])
            qc.cx(qr[2],qr[1])
            qc.cx(qr[3],qr[2])
            for i in range(4):
                qc.rz(theta[count],qr[i])
                count=count+1
            for i in range(4):
                qc.rx(theta[count],qr[i])
                count=count+1
    return qc
def circuit3(qc,qr,theta,L,repeat):
    """Append ansatz 'circuit 3' to qc and return it.

    Layer (x L): RX then RZ on each of 4 qubits, then parameterized CRZ
    chain 3->2, 2->1, 1->0.  Consumes 11*L entries of theta (22*L when
    repeat != 0, which appends the mirrored gate order after a barrier
    with fresh parameters, for expressibility studies).
    """
    count=0
    for l in range(L):
        for i in range(4):
            qc.rx(theta[count],qr[i])
            count=count+1
        for i in range(4):
            qc.rz(theta[count],qr[i])
            count=count+1
        qc.crz(theta[count],qr[3],qr[2])
        count=count+1
        qc.crz(theta[count],qr[2],qr[1])
        count=count+1
        qc.crz(theta[count],qr[1],qr[0])
        count=count+1
    if repeat!=0:
        qc.barrier(qr)
        # Mirror: CRZ chain reversed, then RZ, then RX layers.
        for l in range(L):
            qc.crz(theta[count],qr[1],qr[0])
            count=count+1
            qc.crz(theta[count],qr[2],qr[1])
            count=count+1
            qc.crz(theta[count],qr[3],qr[2])
            count=count+1
            for i in range(4):
                qc.rz(theta[count],qr[i])
                count=count+1
            for i in range(4):
                qc.rx(theta[count],qr[i])
                count=count+1
    return qc
def circuit4(qc,qr,theta,L,repeat):
    """Append ansatz 'circuit 4' to qc and return it.

    Same layout as circuit3 but with CRX entanglers: RX + RZ layers then
    CRX chain 3->2, 2->1, 1->0.  Consumes 11*L entries of theta (22*L
    when repeat != 0, which appends the mirrored gate order after a
    barrier with fresh parameters, for expressibility studies).
    """
    count=0
    for l in range(L):
        for i in range(4):
            qc.rx(theta[count],qr[i])
            count=count+1
        for i in range(4):
            qc.rz(theta[count],qr[i])
            count=count+1
        qc.crx(theta[count],qr[3],qr[2])
        count=count+1
        qc.crx(theta[count],qr[2],qr[1])
        count=count+1
        qc.crx(theta[count],qr[1],qr[0])
        count=count+1
    if repeat!=0:
        qc.barrier(qr)
        # Mirror: CRX chain reversed, then RZ, then RX layers.
        for l in range(L):
            qc.crx(theta[count],qr[1],qr[0])
            count=count+1
            qc.crx(theta[count],qr[2],qr[1])
            count=count+1
            qc.crx(theta[count],qr[3],qr[2])
            count=count+1
            for i in range(4):
                qc.rz(theta[count],qr[i])
                count=count+1
            for i in range(4):
                qc.rx(theta[count],qr[i])
                count=count+1
    return qc
def circuit5(qc,qr,theta,L,repeat):
    """Append ansatz 'circuit 5' to qc and return it.

    Layer (x L): RX + RZ layers, all-to-all parameterized CRZ grid
    (control qr[3-j] onto every other qubit qr[3-i]), then RX + RZ
    layers again.  Consumes 28*L entries of theta (56*L when repeat
    != 0, which appends the mirrored gate order after a barrier with
    fresh parameters, for expressibility studies).
    """
    count=0
    for l in range(L):
        for i in range(4):
            qc.rx(theta[count],qr[i])
            count=count+1
        for i in range(4):
            qc.rz(theta[count],qr[i])
            count=count+1
        for j in range(4):
            for i in range(4):
                if i!=j:
                    qc.crz(theta[count],qr[3-j],qr[3-i])
                    count=count+1
        for i in range(4):
            qc.rx(theta[count],qr[i])
            count=count+1
        for i in range(4):
            qc.rz(theta[count],qr[i])
            count=count+1
    if repeat!=0:
        qc.barrier(qr)
        # Mirror: RZ + RX layers, CRZ grid traversed in reverse pair
        # order (indices (j,i) here reverse the (3-j,3-i) order above),
        # then RZ + RX layers.
        for l in range(L):
            for i in range(4):
                qc.rz(theta[count],qr[i])
                count=count+1
            for i in range(4):
                qc.rx(theta[count],qr[i])
                count=count+1
            for j in range(4):
                for i in range(4):
                    if i!=j:
                        qc.crz(theta[count],qr[j],qr[i])
                        count=count+1
            for i in range(4):
                qc.rz(theta[count],qr[i])
                count=count+1
            for i in range(4):
                qc.rx(theta[count],qr[i])
                count=count+1
    return qc
def circuit6(qc,qr,theta,L,repeat):
    """Append ansatz 'circuit 6' to qc and return it.

    Same layout as circuit5 but with CRX entanglers in the all-to-all
    grid.  Consumes 28*L entries of theta (56*L when repeat != 0, which
    appends the mirrored gate order after a barrier with fresh
    parameters, for expressibility studies).
    """
    count=0
    for l in range(L):
        for i in range(4):
            qc.rx(theta[count],qr[i])
            count=count+1
        for i in range(4):
            qc.rz(theta[count],qr[i])
            count=count+1
        for j in range(4):
            for i in range(4):
                if i!=j:
                    qc.crx(theta[count],qr[3-j],qr[3-i])
                    count=count+1
        for i in range(4):
            qc.rx(theta[count],qr[i])
            count=count+1
        for i in range(4):
            qc.rz(theta[count],qr[i])
            count=count+1
    if repeat!=0:
        qc.barrier(qr)
        # Mirror: RZ + RX layers, CRX grid in reverse pair order,
        # then RZ + RX layers.
        for l in range(L):
            for i in range(4):
                qc.rz(theta[count],qr[i])
                count=count+1
            for i in range(4):
                qc.rx(theta[count],qr[i])
                count=count+1
            for j in range(4):
                for i in range(4):
                    if i!=j:
                        qc.crx(theta[count],qr[j],qr[i])
                        count=count+1
            for i in range(4):
                qc.rz(theta[count],qr[i])
                count=count+1
            for i in range(4):
                qc.rx(theta[count],qr[i])
                count=count+1
    return qc
def circuit7(qc,qr,theta,L,repeat):
    """Append ansatz 'circuit 7' to qc and return it.

    Layer (x L): RX + RZ layers, CRZ(1->0) and CRZ(3->2), RX + RZ
    layers again, then CRZ(2->1).  Consumes 19*L entries of theta
    (38*L when repeat != 0, which appends the mirrored gate order
    after a barrier with fresh parameters, for expressibility studies).
    """
    count=0
    for l in range(L):
        for i in range(4):
            qc.rx(theta[count],qr[i])
            count=count+1
        for i in range(4):
            qc.rz(theta[count],qr[i])
            count=count+1
        qc.crz(theta[count],qr[1],qr[0])
        count=count+1
        qc.crz(theta[count],qr[3],qr[2])
        count=count+1
        for i in range(4):
            qc.rx(theta[count],qr[i])
            count=count+1
        for i in range(4):
            qc.rz(theta[count],qr[i])
            count=count+1
        qc.crz(theta[count],qr[2],qr[1])
        count=count+1
    if repeat!=0:
        qc.barrier(qr)
        # Mirror: same gates in reverse order with fresh parameters.
        for l in range(L):
            qc.crz(theta[count],qr[2],qr[1])
            count=count+1
            for i in range(4):
                qc.rz(theta[count],qr[i])
                count=count+1
            for i in range(4):
                qc.rx(theta[count],qr[i])
                count=count+1
            qc.crz(theta[count],qr[3],qr[2])
            count=count+1
            qc.crz(theta[count],qr[1],qr[0])
            count=count+1
            for i in range(4):
                qc.rz(theta[count],qr[i])
                count=count+1
            for i in range(4):
                qc.rx(theta[count],qr[i])
                count=count+1
    return qc
def circuit8(qc,qr,theta,L,repeat):
    """Append ansatz 'circuit 8' to qc and return it.

    Same layout as circuit7 but with CRX entanglers.  Consumes 19*L
    entries of theta (38*L when repeat != 0, which appends the mirrored
    gate order after a barrier with fresh parameters, for
    expressibility studies).
    """
    count=0
    for l in range(L):
        for i in range(4):
            qc.rx(theta[count],qr[i])
            count=count+1
        for i in range(4):
            qc.rz(theta[count],qr[i])
            count=count+1
        qc.crx(theta[count],qr[1],qr[0])
        count=count+1
        qc.crx(theta[count],qr[3],qr[2])
        count=count+1
        for i in range(4):
            qc.rx(theta[count],qr[i])
            count=count+1
        for i in range(4):
            qc.rz(theta[count],qr[i])
            count=count+1
        qc.crx(theta[count],qr[2],qr[1])
        count=count+1
    if repeat!=0:
        qc.barrier(qr)
        # Mirror: same gates in reverse order with fresh parameters.
        for l in range(L):
            qc.crx(theta[count],qr[2],qr[1])
            count=count+1
            for i in range(4):
                qc.rz(theta[count],qr[i])
                count=count+1
            for i in range(4):
                qc.rx(theta[count],qr[i])
                count=count+1
            qc.crx(theta[count],qr[3],qr[2])
            count=count+1
            qc.crx(theta[count],qr[1],qr[0])
            count=count+1
            for i in range(4):
                qc.rz(theta[count],qr[i])
                count=count+1
            for i in range(4):
                qc.rx(theta[count],qr[i])
                count=count+1
    return qc
def circuit9(qc,qr,theta,L,repeat):
#circuit 9
#theta is list of the parameters
#theta length is (4)L
#L is the number of repeatation
# repeat will conjugate the first part and add next the the circuit for expressibility
# 0:No, 1: Repeat
count=0
for l in range(L):
for i in range(4):
qc.h(qr[i])
qc.cz(qr[3],qr[2])
qc.cz(qr[2],qr[1])
qc.cz(qr[1],qr[0])
for i in range(4):
qc.rx(theta[count],qr[i])
count=count+1
if repeat!=0:
qc.barrier(qr)
for l in range(L):
for i in range(4):
qc.rx(theta[count],qr[i])
count=count+1
qc.cz(qr[1],qr[0])
qc.cz(qr[2],qr[1])
qc.cz(qr[3],qr[2])
for i in range(4):
qc.h(qr[i])
return qc
def circuit10(qc,qr,theta,L,repeat):
    """Append ansatz 'circuit 10' to qc and return it.

    Initial RY layer on all 4 qubits, then L layers of a CZ ring
    (3-2, 2-1, 1-0, 3-0) followed by an RY layer.  Consumes 4*L + 4
    entries of theta (double when repeat != 0, which appends the
    mirrored gate order after a barrier with fresh parameters, for
    expressibility studies).
    """
    count=0
    for i in range(4):
        qc.ry(theta[count],qr[i])
        count=count+1
    for l in range(L):
        qc.cz(qr[3],qr[2])
        qc.cz(qr[2],qr[1])
        qc.cz(qr[1],qr[0])
        qc.cz(qr[3],qr[0])
        for i in range(4):
            qc.ry(theta[count],qr[i])
            count=count+1
    if repeat!=0:
        qc.barrier(qr)
        # Mirror: per-layer RY then reversed CZ ring, closed by the
        # final RY layer mirroring the initial one.
        for l in range(L):
            for i in range(4):
                qc.ry(theta[count],qr[i])
                count=count+1
            qc.cz(qr[3],qr[0])
            qc.cz(qr[1],qr[0])
            qc.cz(qr[2],qr[1])
            qc.cz(qr[3],qr[2])
        for i in range(4):
            qc.ry(theta[count],qr[i])
            count=count+1
    return qc
def circuit11(qc,qr,theta,L,repeat):
    """Append ansatz 'circuit 11' to qc and return it.

    Layer (x L): RY + RZ layers, CX(1->0) and CX(3->2), RY and RZ on
    the middle qubits 1 and 2, then CX(2->1).  Consumes 12*L entries
    of theta (24*L when repeat != 0, which appends the mirrored gate
    order after a barrier with fresh parameters, for expressibility
    studies).
    """
    count=0
    for l in range(L):
        for i in range(4):
            qc.ry(theta[count],qr[i])
            count=count+1
        for i in range(4):
            qc.rz(theta[count],qr[i])
            count=count+1
        qc.cx(qr[1],qr[0])
        qc.cx(qr[3],qr[2])
        qc.ry(theta[count],qr[1])
        count=count+1
        qc.ry(theta[count],qr[2])
        count=count+1
        qc.rz(theta[count],qr[1])
        count=count+1
        qc.rz(theta[count],qr[2])
        count=count+1
        qc.cx(qr[2],qr[1])
    if repeat!=0:
        qc.barrier(qr)
        # Mirror: same gates in reverse order with fresh parameters.
        for l in range(L):
            qc.cx(qr[2],qr[1])
            qc.rz(theta[count],qr[2])
            count=count+1
            qc.rz(theta[count],qr[1])
            count=count+1
            qc.ry(theta[count],qr[2])
            count=count+1
            qc.ry(theta[count],qr[1])
            count=count+1
            qc.cx(qr[3],qr[2])
            qc.cx(qr[1],qr[0])
            for i in range(4):
                qc.rz(theta[count],qr[i])
                count=count+1
            for i in range(4):
                qc.ry(theta[count],qr[i])
                count=count+1
    return qc
def circuit12(qc,qr,theta,L,repeat):
    """Append ansatz 'circuit 12' to qc and return it.

    Same layout as circuit11 but with CZ entanglers.  Consumes 12*L
    entries of theta (24*L when repeat != 0, which appends the mirrored
    gate order after a barrier with fresh parameters, for
    expressibility studies).
    """
    count=0
    for l in range(L):
        for i in range(4):
            qc.ry(theta[count],qr[i])
            count=count+1
        for i in range(4):
            qc.rz(theta[count],qr[i])
            count=count+1
        qc.cz(qr[1],qr[0])
        qc.cz(qr[3],qr[2])
        qc.ry(theta[count],qr[1])
        count=count+1
        qc.ry(theta[count],qr[2])
        count=count+1
        qc.rz(theta[count],qr[1])
        count=count+1
        qc.rz(theta[count],qr[2])
        count=count+1
        qc.cz(qr[2],qr[1])
    if repeat!=0:
        qc.barrier(qr)
        # Mirror: same gates in reverse order with fresh parameters.
        for l in range(L):
            qc.cz(qr[2],qr[1])
            qc.rz(theta[count],qr[2])
            count=count+1
            qc.rz(theta[count],qr[1])
            count=count+1
            qc.ry(theta[count],qr[2])
            count=count+1
            qc.ry(theta[count],qr[1])
            count=count+1
            qc.cz(qr[3],qr[2])
            qc.cz(qr[1],qr[0])
            for i in range(4):
                qc.rz(theta[count],qr[i])
                count=count+1
            for i in range(4):
                qc.ry(theta[count],qr[i])
                count=count+1
    return qc
def circuit13(qc,qr,theta,L,repeat):
    """Append ansatz 'circuit 13' to qc and return it.

    Layer (x L): RY layer, CRZ ring 3->0, 2->3, 1->2, 0->1, RY layer,
    then CRZ ring 3->2, 0->3, 1->0, 2->1.  Consumes 16*L entries of
    theta (32*L when repeat != 0, which appends the mirrored gate order
    after a barrier with fresh parameters, for expressibility studies).
    """
    count=0
    for l in range(L):
        for i in range(4):
            qc.ry(theta[count],qr[i])
            count=count+1
        qc.crz(theta[count],qr[3],qr[0])
        count=count+1
        qc.crz(theta[count],qr[2],qr[3])
        count=count+1
        qc.crz(theta[count],qr[1],qr[2])
        count=count+1
        qc.crz(theta[count],qr[0],qr[1])
        count=count+1
        for i in range(4):
            qc.ry(theta[count],qr[i])
            count=count+1
        qc.crz(theta[count],qr[3],qr[2])
        count=count+1
        qc.crz(theta[count],qr[0],qr[3])
        count=count+1
        qc.crz(theta[count],qr[1],qr[0])
        count=count+1
        qc.crz(theta[count],qr[2],qr[1])
        count=count+1
    if repeat!=0:
        qc.barrier(qr)
        # Mirror: same gates in reverse order with fresh parameters.
        for l in range(L):
            qc.crz(theta[count],qr[2],qr[1])
            count=count+1
            qc.crz(theta[count],qr[1],qr[0])
            count=count+1
            qc.crz(theta[count],qr[0],qr[3])
            count=count+1
            qc.crz(theta[count],qr[3],qr[2])
            count=count+1
            for i in range(4):
                qc.ry(theta[count],qr[i])
                count=count+1
            qc.crz(theta[count],qr[0],qr[1])
            count=count+1
            qc.crz(theta[count],qr[1],qr[2])
            count=count+1
            qc.crz(theta[count],qr[2],qr[3])
            count=count+1
            qc.crz(theta[count],qr[3],qr[0])
            count=count+1
        for i in range(4):
            qc.ry(theta[count],qr[i])
            count=count+1
    return qc
def circuit14(qc,qr,theta,L,repeat):
    """Append ansatz 'circuit 14' to qc and return it.

    Same layout as circuit13 but with CRX entanglers.  Consumes 16*L
    entries of theta (32*L when repeat != 0, which appends the mirrored
    gate order after a barrier with fresh parameters, for
    expressibility studies).
    """
    count=0
    for l in range(L):
        for i in range(4):
            qc.ry(theta[count],qr[i])
            count=count+1
        qc.crx(theta[count],qr[3],qr[0])
        count=count+1
        qc.crx(theta[count],qr[2],qr[3])
        count=count+1
        qc.crx(theta[count],qr[1],qr[2])
        count=count+1
        qc.crx(theta[count],qr[0],qr[1])
        count=count+1
        for i in range(4):
            qc.ry(theta[count],qr[i])
            count=count+1
        qc.crx(theta[count],qr[3],qr[2])
        count=count+1
        qc.crx(theta[count],qr[0],qr[3])
        count=count+1
        qc.crx(theta[count],qr[1],qr[0])
        count=count+1
        qc.crx(theta[count],qr[2],qr[1])
        count=count+1
    if repeat!=0:
        qc.barrier(qr)
        # Mirror: same gates in reverse order with fresh parameters.
        for l in range(L):
            qc.crx(theta[count],qr[2],qr[1])
            count=count+1
            qc.crx(theta[count],qr[1],qr[0])
            count=count+1
            qc.crx(theta[count],qr[0],qr[3])
            count=count+1
            qc.crx(theta[count],qr[3],qr[2])
            count=count+1
            for i in range(4):
                qc.ry(theta[count],qr[i])
                count=count+1
            qc.crx(theta[count],qr[0],qr[1])
            count=count+1
            qc.crx(theta[count],qr[1],qr[2])
            count=count+1
            qc.crx(theta[count],qr[2],qr[3])
            count=count+1
            qc.crx(theta[count],qr[3],qr[0])
            count=count+1
        for i in range(4):
            qc.ry(theta[count],qr[i])
            count=count+1
    return qc
def circuit15(qc,qr,theta,L,repeat):
    """Append ansatz 'circuit 15' to qc and return it.

    Layer (x L): RY layer, CX ring 3->0, 2->3, 1->2, 0->1, RY layer,
    then CX ring 3->2, 0->3, 1->0, 2->1.  Consumes 8*L entries of
    theta (16*L when repeat != 0, which appends the mirrored gate order
    after a barrier with fresh parameters, for expressibility studies).
    """
    count=0
    for l in range(L):
        for i in range(4):
            qc.ry(theta[count],qr[i])
            count=count+1
        qc.cx(qr[3],qr[0])
        qc.cx(qr[2],qr[3])
        qc.cx(qr[1],qr[2])
        qc.cx(qr[0],qr[1])
        for i in range(4):
            qc.ry(theta[count],qr[i])
            count=count+1
        qc.cx(qr[3],qr[2])
        qc.cx(qr[0],qr[3])
        qc.cx(qr[1],qr[0])
        qc.cx(qr[2],qr[1])
    if repeat!=0:
        qc.barrier(qr)
        # Mirror: same gates in reverse order with fresh parameters.
        for l in range(L):
            qc.cx(qr[2],qr[1])
            qc.cx(qr[1],qr[0])
            qc.cx(qr[0],qr[3])
            qc.cx(qr[3],qr[2])
            for i in range(4):
                qc.ry(theta[count],qr[i])
                count=count+1
            qc.cx(qr[0],qr[1])
            qc.cx(qr[1],qr[2])
            qc.cx(qr[2],qr[3])
            qc.cx(qr[3],qr[0])
        for i in range(4):
            qc.ry(theta[count],qr[i])
            count=count+1
    return qc
def circuit16(qc,qr,theta,L,repeat):
    """Append ansatz 'circuit 16' to qc and return it.

    Layer (x L): RX + RZ layers, then CRZ(1->0), CRZ(3->2), CRZ(2->1).
    Consumes 11*L entries of theta (22*L when repeat != 0, which
    appends the mirrored gate order after a barrier with fresh
    parameters, for expressibility studies).
    """
    count=0
    for l in range(L):
        for i in range(4):
            qc.rx(theta[count],qr[i])
            count=count+1
        for i in range(4):
            qc.rz(theta[count],qr[i])
            count=count+1
        qc.crz(theta[count],qr[1],qr[0])
        count=count+1
        qc.crz(theta[count],qr[3],qr[2])
        count=count+1
        qc.crz(theta[count],qr[2],qr[1])
        count=count+1
    if repeat!=0:
        qc.barrier(qr)
        # Mirror: CRZ gates reversed, then RZ, then RX layers.
        for l in range(L):
            qc.crz(theta[count],qr[2],qr[1])
            count=count+1
            qc.crz(theta[count],qr[3],qr[2])
            count=count+1
            qc.crz(theta[count],qr[1],qr[0])
            count=count+1
            for i in range(4):
                qc.rz(theta[count],qr[i])
                count=count+1
            for i in range(4):
                qc.rx(theta[count],qr[i])
                count=count+1
    return qc
def circuit17(qc,qr,theta,L,repeat):
    """Append ansatz 'circuit 17' to qc and return it.

    Same layout as circuit16 but with CRX entanglers.  Consumes 11*L
    entries of theta (22*L when repeat != 0, which appends the mirrored
    gate order after a barrier with fresh parameters, for
    expressibility studies).
    """
    count=0
    for l in range(L):
        for i in range(4):
            qc.rx(theta[count],qr[i])
            count=count+1
        for i in range(4):
            qc.rz(theta[count],qr[i])
            count=count+1
        qc.crx(theta[count],qr[1],qr[0])
        count=count+1
        qc.crx(theta[count],qr[3],qr[2])
        count=count+1
        qc.crx(theta[count],qr[2],qr[1])
        count=count+1
    if repeat!=0:
        qc.barrier(qr)
        # Mirror: CRX gates reversed, then RZ, then RX layers.
        for l in range(L):
            qc.crx(theta[count],qr[2],qr[1])
            count=count+1
            qc.crx(theta[count],qr[3],qr[2])
            count=count+1
            qc.crx(theta[count],qr[1],qr[0])
            count=count+1
            for i in range(4):
                qc.rz(theta[count],qr[i])
                count=count+1
            for i in range(4):
                qc.rx(theta[count],qr[i])
                count=count+1
    return qc
def circuit18(qc,qr,theta,L,repeat):
    """Append ansatz 'circuit 18' to qc and return it.

    Layer (x L): RX + RZ layers, then CRZ ring 3->0, 2->3, 1->2, 0->1.
    Consumes 12*L entries of theta (24*L when repeat != 0, which
    appends the mirrored gate order after a barrier with fresh
    parameters, for expressibility studies).
    """
    count=0
    for l in range(L):
        for i in range(4):
            qc.rx(theta[count],qr[i])
            count=count+1
        for i in range(4):
            qc.rz(theta[count],qr[i])
            count=count+1
        qc.crz(theta[count],qr[3],qr[0])
        count=count+1
        qc.crz(theta[count],qr[2],qr[3])
        count=count+1
        qc.crz(theta[count],qr[1],qr[2])
        count=count+1
        qc.crz(theta[count],qr[0],qr[1])
        count=count+1
    if repeat!=0:
        qc.barrier(qr)
        # Mirror: CRZ ring reversed, then RZ, then RX layers.
        for l in range(L):
            qc.crz(theta[count],qr[0],qr[1])
            count=count+1
            qc.crz(theta[count],qr[1],qr[2])
            count=count+1
            qc.crz(theta[count],qr[2],qr[3])
            count=count+1
            qc.crz(theta[count],qr[3],qr[0])
            count=count+1
            for i in range(4):
                qc.rz(theta[count],qr[i])
                count=count+1
            for i in range(4):
                qc.rx(theta[count],qr[i])
                count=count+1
    return qc
def circuit19(qc,qr,theta,L,repeat):
    """Append ansatz 'circuit 19' to qc and return it.

    (Original header comment said 'circuit 1' — copy/paste slip.)
    Same layout as circuit18 but with CRX entanglers: RX + RZ layers,
    then CRX ring 3->0, 2->3, 1->2, 0->1.  Consumes 12*L entries of
    theta (24*L when repeat != 0, which appends the mirrored gate order
    after a barrier with fresh parameters, for expressibility studies).
    """
    count=0
    for l in range(L):
        for i in range(4):
            qc.rx(theta[count],qr[i])
            count=count+1
        for i in range(4):
            qc.rz(theta[count],qr[i])
            count=count+1
        qc.crx(theta[count],qr[3],qr[0])
        count=count+1
        qc.crx(theta[count],qr[2],qr[3])
        count=count+1
        qc.crx(theta[count],qr[1],qr[2])
        count=count+1
        qc.crx(theta[count],qr[0],qr[1])
        count=count+1
    if repeat!=0:
        qc.barrier(qr)
        # Mirror: CRX ring reversed, then RZ, then RX layers.
        for l in range(L):
            qc.crx(theta[count],qr[0],qr[1])
            count=count+1
            qc.crx(theta[count],qr[1],qr[2])
            count=count+1
            qc.crx(theta[count],qr[2],qr[3])
            count=count+1
            qc.crx(theta[count],qr[3],qr[0])
            count=count+1
            for i in range(4):
                qc.rz(theta[count],qr[i])
                count=count+1
            for i in range(4):
                qc.rx(theta[count],qr[i])
                count=count+1
    return qc
| 23.070423
| 90
| 0.436975
| 3,959
| 27,846
| 3.073503
| 0.022733
| 0.159435
| 0.191322
| 0.104701
| 0.976824
| 0.976824
| 0.944362
| 0.943787
| 0.942636
| 0.93335
| 0
| 0.047543
| 0.437262
| 27,846
| 1,206
| 91
| 23.089552
| 0.728973
| 0.133269
| 0
| 0.963072
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028065
| false
| 0
| 0.002954
| 0
| 0.059084
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a17fa38d402250ebd950e30b4811ebb553f4248d
| 785
|
py
|
Python
|
Task/Strip-block-comments/Python/strip-block-comments-2.py
|
LaudateCorpus1/RosettaCodeData
|
9ad63ea473a958506c041077f1d810c0c7c8c18d
|
[
"Info-ZIP"
] | 1
|
2018-11-09T22:08:38.000Z
|
2018-11-09T22:08:38.000Z
|
Task/Strip-block-comments/Python/strip-block-comments-2.py
|
seanwallawalla-forks/RosettaCodeData
|
9ad63ea473a958506c041077f1d810c0c7c8c18d
|
[
"Info-ZIP"
] | null | null | null |
Task/Strip-block-comments/Python/strip-block-comments-2.py
|
seanwallawalla-forks/RosettaCodeData
|
9ad63ea473a958506c041077f1d810c0c7c8c18d
|
[
"Info-ZIP"
] | 1
|
2018-11-09T22:08:40.000Z
|
2018-11-09T22:08:40.000Z
|
def test():
    """Demo driver: run commentstripper on two C-style samples.

    Prints the stripped result for a non-nested and a nested block-comment
    example.  NOTE(review): the samples' original in-string indentation was
    lost in transit — verify against upstream before relying on whitespace.
    """
    print('\nNON-NESTED BLOCK COMMENT EXAMPLE:')
    sample = ''' /**
 * Some comments
 * longer comments here that we can parse.
 *
 * Rahoo
 */
 function subroutine() {
 a = /* inline comment */ b + c ;
 }
 /*/ <-- tricky comments */
 /**
 * Another comment.
 */
 function something() {
 }'''
    print(commentstripper(sample))
    print('\nNESTED BLOCK COMMENT EXAMPLE:')
    sample = ''' /**
 * Some comments
 * longer comments here that we can parse.
 *
 * Rahoo
 *//*
 function subroutine() {
 a = /* inline comment */ b + c ;
 }
 /*/ <-- tricky comments */
 */
 /**
 * Another comment.
 */
 function something() {
 }'''
    print(commentstripper(sample))
if __name__ == '__main__':
    # Script entry point: print both demo results.
    test()
| 18.690476
| 48
| 0.527389
| 71
| 785
| 5.71831
| 0.450704
| 0.059113
| 0.093596
| 0.123153
| 0.881773
| 0.881773
| 0.881773
| 0.881773
| 0.881773
| 0.881773
| 0
| 0
| 0.30828
| 785
| 41
| 49
| 19.146341
| 0.747698
| 0
| 0
| 0.736842
| 0
| 0
| 0.768153
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026316
| false
| 0
| 0
| 0
| 0.026316
| 0.105263
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a1a1eed799a6904a2b95cdc04e8a3861b07a61ca
| 122,698
|
py
|
Python
|
StocksMA/utils.py
|
mustaphaezzali/StocksMA
|
a6bac0fa1a2e454888b251ffa04c651a925fcec8
|
[
"MIT"
] | 1
|
2022-03-27T21:25:19.000Z
|
2022-03-27T21:25:19.000Z
|
StocksMA/utils.py
|
mustaphaezzali/StocksMA
|
a6bac0fa1a2e454888b251ffa04c651a925fcec8
|
[
"MIT"
] | null | null | null |
StocksMA/utils.py
|
mustaphaezzali/StocksMA
|
a6bac0fa1a2e454888b251ffa04c651a925fcec8
|
[
"MIT"
] | null | null | null |
import random
from functools import wraps
import requests
# Ticker symbol -> [full company name, short display name] for Moroccan
# listed companies (e.g. "IAM" -> Maroc Telecom).
# NOTE(review): the second element looks like the label used by the quote
# source this module queries via `requests` — confirm against the caller.
COMPANIES = {
    "ADH": ["Douja Promotion Groupe Addoha", "Addoha P"],
    "ADI": ["Alliances Developpement Immobilier S.A.", "Alliances P"],
    "AFI": ["Afric Industries S.A.", "Afric Indus."],
    "AFM": ["AFMA S.A.", "AFMA P"],
    "AGM": ["Agma S.A.", "Agma P"],
    "ALM": ["Aluminium du Maroc", "Aluminium Maroc P"],
    "ARD": ["Aradei Capital", "Aradei Capital"],
    "ATH": ["Auto Hall S.A.", "Auto Hall P"],
    "ATL": ["AtlantaSanad", "ATLANTA P"],
    "ATW": ["Attijariwafa Bank", "Attijariwafa Bank"],
    "BAL": ["Societe Immobiliere Balima", "BALIMA P"],
    "BCI": ["Banque Marocaine pour le Commerce et l'Industrie ", "BMCI P"],
    "BCP": ["Banque Centrale Populaire S.A.", "BCP P"],
    "BOA": ["Bank of Africa", "Bank Of Africa"],
    "CDA": ["Centrale Danone", "Central.Danone P/N"],
    "CDM": ["Credit du Maroc", "CDM P"],
    "CIH": ["Credit Immobilier et Hotelier", "CIH P"],
    "CMA": ["Les Ciments du Maroc", "Ciments Maroc"],
    "CMT": ["Compagnie Miniere de Touissit S.A.", "CMT"],
    "COL": ["Colorado S.A.", "Colorado P"],
    "CRS": ["Cartier Saada S.A.", "Cartier Saada P"],
    "CSR": ["Cosumar", "COSUMAR"],
    "CTM": ["Compagnie de Transports au Maroc S.A.", "CTM P"],
    "DHO": ["Delta Holding S.A.", "Delta Holding P"],
    "DLM": ["Delattre Levivier Maroc S.A.", "Delattre Lev. P"],
    "DWY": ["Disway S.A.", "DISWAY P"],
    "EQD": ["Societe d'Equipement Domestique et Menager S.A. ", "EQDOM P"],
    "FBR": ["Fenie Brossette S.A.", "FENIE BROSSETTE P"],
    "GAZ": ["Afriquia Gaz", "Afriquia Gaz P"],
    "HPS": ["Hightech Payment Systems S.A.", "HPS P"],
    "IAM": ["Maroc Telecom", "Maroc Telecom"],
    "IBC": ["IB Maroc.com S.A.", "IBMaroc.com P"],
    "IMO": ["Immorente Invest S.A.", "Immr Invest P"],
    "INV": ["Involys", "INVOLYS P"],
    "JET": ["Jet Contractors S.A.", "Jet Contractors P"],
    "LBV": ["Label Vie", "LABEL VIE P"],
    "LES": ["Lesieur Cristal S.A.", "Lesieur Cristal"],
    "LHM": ["LafargeHolcim Maroc", "LafargeHolcim P"],
    "M2M": ["m2m group S.A.", "M2M Group P"],
    "MAB": ["Maghrebail", "Maghrebail P"],
    "MDP": ["Med Paper S.A.", "Med Paper P"],
    "MIC": ["Microdata S.A.R.L.", "Microdata N"],
    "MLE": ["Maroc Leasing S.A.", "Maroc Leasing N"],
    "MNG": ["Managem", "Managem P"],
    "MOX": ["Maghreb Oxygene", "Maghreb Oxygene P"],
    "MSA": ["SODEP-Marsa Maroc", "SODEP P"],
    "MUT": ["Mutandis SCA", "Mutandis Br"],
    "NEJ": ["Auto Nejma Maroc S.A.", "Auto Nejma P"],
    "NKL": ["Ennakl Automobiles", "Ennakl N"],
    "PRO": ["Promopharm S.A.", "PROMOPHARM"],
    "RDS": ["Residences Dar Saada S.A.", "Resid Dar Saada P"],
    "RIS": ["Risma", "Risma P"],
    "S2M": ["Societe Maghrebine de Monetique", "S2M P"],
    "SAH": ["Saham Assurance S.A.", "Saham Assurance N"],
    "SBM": ["Societe des Boissons du Maroc", "Ste Boissons P"],
    "SID": ["Societe Nationale de Siderurgie S.A.", "Sonasid P"],
    "SLF": ["Salafin", "SALAFIN P"],
    "SMI": ["Societe Metallurgique d'Imiter ", "SMI P"],
    "SNA": ["Stokvis Nord Afrique", "SNA P"],
    "SNP": ["Societe Nationale d'Electrolyse et de Petrochimie ", "SNEP P"],
    "SOT": ["Sothema", "SOTHEMA"],
    "SRM": ["Societe de Realisations Mecaniques", "SRM P"],
    "STR": ["STROC Industrie S.A.", "STROC Indus. P"],
    "TGC": ["Travaux Generaux de Construction de Casablanca S.A.", "TGCC"],
    "TIM": ["TIMAR S.A.", "Timar P"],
    "TMA": ["TotalEnergies Marketing Maroc", "Total Maroc P"],
    "TQM": ["Taqa Morocco", "TAQA Morocco P"],
    "WAA": ["Wafa Assurance S.A.", "Wafa Assur P"],
    "ZDJ": ["Zellidja S.A.", "Zellidja P"],
}
USER_AGENTS = [
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Ubuntu Chromium/37.0.2062.94 Chrome/37.0.2062.94 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.0",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/600.8.9 (KHTML, like Gecko) Version/8.0.8 Safari/600.8.9",
"Mozilla/5.0 (iPad; CPU OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12H321 Safari/600.1.4",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.10240",
"Mozilla/5.0 (Windows NT 6.3; WOW64; rv:40.0) Gecko/20100101 Firefox/40.0",
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; Trident/7.0; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 10.0; WOW64; rv:40.0) Gecko/20100101 Firefox/40.0",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) AppleWebKit/600.7.12 (KHTML, like Gecko) Version/8.0.7 Safari/600.7.12",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:40.0) Gecko/20100101 Firefox/40.0",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/600.8.9 (KHTML, like Gecko) Version/7.1.8 Safari/537.85.17",
"Mozilla/5.0 (iPad; CPU OS 8_4 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12H143 Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 8_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12F69 Safari/600.1.4",
"Mozilla/5.0 (Windows NT 6.1; rv:40.0) Gecko/20100101 Firefox/40.0",
"Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)",
"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0)",
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; Touch; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 5.1; rv:40.0) Gecko/20100101 Firefox/40.0",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/600.6.3 (KHTML, like Gecko) Version/8.0.6 Safari/600.6.3",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/600.5.17 (KHTML, like Gecko) Version/8.0.5 Safari/600.5.17",
"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:38.0) Gecko/20100101 Firefox/38.0",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36",
"Mozilla/5.0 (iPhone; CPU iPhone OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12H321 Safari/600.1.4",
"Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko",
"Mozilla/5.0 (iPad; CPU OS 7_1_2 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) Version/7.0 Mobile/11D257 Safari/9537.53",
"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:40.0) Gecko/20100101 Firefox/40.0",
"Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)",
"Mozilla/5.0 (Windows NT 6.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36",
"Mozilla/5.0 (X11; CrOS x86_64 7077.134.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.156 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/600.7.12 (KHTML, like Gecko) Version/7.1.7 Safari/537.85.16",
"Mozilla/5.0 (Windows NT 6.0; rv:40.0) Gecko/20100101 Firefox/40.0",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:40.0) Gecko/20100101 Firefox/40.0",
"Mozilla/5.0 (iPad; CPU OS 8_1_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12B466 Safari/600.1.4",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/600.3.18 (KHTML, like Gecko) Version/8.0.3 Safari/600.3.18",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; Win64; x64; Trident/7.0; rv:11.0) like Gecko",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 8_1_2 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12B440 Safari/600.1.4",
"Mozilla/5.0 (Linux; U; Android 4.0.3; en-us; KFTT Build/IML74K) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 8_2 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12D508 Safari/600.1.4",
"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:39.0) Gecko/20100101 Firefox/39.0",
"Mozilla/5.0 (iPad; CPU OS 7_1_1 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) Version/7.0 Mobile/11D201 Safari/9537.53",
"Mozilla/5.0 (Linux; U; Android 4.4.3; en-us; KFTHWI Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/600.6.3 (KHTML, like Gecko) Version/7.1.6 Safari/537.85.15",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/600.4.10 (KHTML, like Gecko) Version/8.0.4 Safari/600.4.10",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.7; rv:40.0) Gecko/20100101 Firefox/40.0",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.78.2 (KHTML, like Gecko) Version/7.0.6 Safari/537.78.2",
"Mozilla/5.0 (iPad; CPU OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/45.0.2454.68 Mobile/12H321 Safari/600.1.4",
"Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; Touch; rv:11.0) like Gecko",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 8_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12B410 Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 7_0_4 like Mac OS X) AppleWebKit/537.51.1 (KHTML, like Gecko) Version/7.0 Mobile/11B554a Safari/9537.53",
"Mozilla/5.0 (Windows NT 6.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; TNJB; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.63 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; ARM; Trident/7.0; Touch; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:40.0) Gecko/20100101 Firefox/40.0",
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; MDDCJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.0; WOW64; rv:40.0) Gecko/20100101 Firefox/40.0",
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.2; WOW64; rv:40.0) Gecko/20100101 Firefox/40.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 8_4 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12H143 Safari/600.1.4",
"Mozilla/5.0 (Linux; U; Android 4.4.3; en-us; KFASWI Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/7.0.55539 Mobile/12H321 Safari/600.1.4",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; Touch; rv:11.0) like Gecko",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:40.0) Gecko/20100101 Firefox/40.0",
"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:31.0) Gecko/20100101 Firefox/31.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 8_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12F70 Safari/600.1.4",
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; MATBJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Linux; U; Android 4.0.4; en-us; KFJWI Build/IMM76D) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 7_1 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) Version/7.0 Mobile/11D167 Safari/9537.53",
"Mozilla/5.0 (X11; CrOS armv7l 7077.134.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.156 Safari/537.36",
"Mozilla/5.0 (X11; Linux x86_64; rv:34.0) Gecko/20100101 Firefox/34.0",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E)",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10) AppleWebKit/600.1.25 (KHTML, like Gecko) Version/8.0 Safari/600.1.25",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/600.2.5 (KHTML, like Gecko) Version/8.0.2 Safari/600.2.5",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.134 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.63 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/600.1.25 (KHTML, like Gecko) Version/8.0 Safari/600.1.25",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:39.0) Gecko/20100101 Firefox/39.0",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11) AppleWebKit/601.1.56 (KHTML, like Gecko) Version/9.0 Safari/601.1.56",
"Mozilla/5.0 (Linux; U; Android 4.4.3; en-us; KFSOWI Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 5_1_1 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko) Version/5.1 Mobile/9B206 Safari/7534.48.3",
"Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 8_1_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12B435 Safari/600.1.4",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.63 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.10240",
"Mozilla/5.0 (Windows NT 6.3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; Touch; LCJB; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; MDDRJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Linux; U; Android 4.4.3; en-us; KFAPWI Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; Trident/7.0; Touch; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; LCJB; rv:11.0) like Gecko",
"Mozilla/5.0 (Linux; U; Android 4.0.3; en-us; KFOT Build/IML74K) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 6_1_3 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10B329 Safari/8536.25",
"Mozilla/5.0 (Linux; U; Android 4.4.3; en-us; KFARWI Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; ASU2JS; rv:11.0) like Gecko",
"Mozilla/5.0 (iPad; CPU OS 8_0_2 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12A405 Safari/600.1.4",
"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0)",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_4) AppleWebKit/537.77.4 (KHTML, like Gecko) Version/7.0.5 Safari/537.77.4",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; rv:38.0) Gecko/20100101 Firefox/38.0",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; yie11; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; MALNJS; rv:11.0) like Gecko",
"Mozilla/5.0 (iPad; CPU OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/8.0.57838 Mobile/12H321 Safari/600.1.4",
"Mozilla/5.0 (Windows NT 6.3; WOW64; rv:39.0) Gecko/20100101 Firefox/39.0",
"Mozilla/5.0 (Windows NT 10.0; rv:40.0) Gecko/20100101 Firefox/40.0",
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; MAGWJS; rv:11.0) like Gecko",
"Mozilla/5.0 (X11; Linux x86_64; rv:31.0) Gecko/20100101 Firefox/31.0",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/600.5.17 (KHTML, like Gecko) Version/7.1.5 Safari/537.85.14",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.152 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; Touch; TNJB; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; NP06; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:40.0) Gecko/20100101 Firefox/40.0",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36 OPR/31.0.1889.174",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/600.4.8 (KHTML, like Gecko) Version/8.0.3 Safari/600.4.8",
"Mozilla/5.0 (iPad; CPU OS 7_0_6 like Mac OS X) AppleWebKit/537.51.1 (KHTML, like Gecko) Version/7.0 Mobile/11B651 Safari/9537.53",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/600.3.18 (KHTML, like Gecko) Version/7.1.3 Safari/537.85.12",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko; Google Web Preview) Chrome/27.0.1453 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 8_0 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12A365 Safari/600.1.4",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; rv:39.0) Gecko/20100101 Firefox/39.0",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.94 AOL/9.7 AOLBuild/4343.4049.US Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 8_4 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/45.0.2454.68 Mobile/12H143 Safari/600.1.4",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:38.0) Gecko/20100101 Firefox/38.0",
"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:37.0) Gecko/20100101 Firefox/37.0",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:39.0) Gecko/20100101 Firefox/39.0",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Mobile/12H321",
"Mozilla/5.0 (iPad; CPU OS 7_0_3 like Mac OS X) AppleWebKit/537.51.1 (KHTML, like Gecko) Version/7.0 Mobile/11B511 Safari/9537.53",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/600.1.17 (KHTML, like Gecko) Version/7.1 Safari/537.85.10",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.130 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/600.2.5 (KHTML, like Gecko) Version/7.1.2 Safari/537.85.11",
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; Touch; ASU2JS; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36",
"Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.0.1) Gecko/2008070208 Firefox/3.0.1",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:41.0) Gecko/20100101 Firefox/41.0",
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; Touch; MDDCJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.3; rv:40.0) Gecko/20100101 Firefox/40.0",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/534.34 (KHTML, like Gecko) Qt/4.8.5 Safari/534.34",
"Mozilla/5.0 (iPhone; CPU iPhone OS 7_0 like Mac OS X) AppleWebKit/537.51.1 (KHTML, like Gecko) Version/7.0 Mobile/11A465 Safari/9537.53 BingPreview/1.0b",
"Mozilla/5.0 (X11; Linux x86_64; rv:38.0) Gecko/20100101 Firefox/38.0",
"Mozilla/5.0 (iPad; CPU OS 8_4 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/7.0.55539 Mobile/12H143 Safari/600.1.4",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36",
"Mozilla/5.0 (X11; CrOS x86_64 7262.52.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.86 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; MDDCJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.63 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; WOW64; rv:38.0) Gecko/20100101 Firefox/38.0",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/600.4.10 (KHTML, like Gecko) Version/7.1.4 Safari/537.85.13",
"Mozilla/5.0 (Unknown; Linux x86_64) AppleWebKit/538.1 (KHTML, like Gecko) PhantomJS/2.0.0 Safari/538.1",
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; Touch; MALNJS; rv:11.0) like Gecko",
"Mozilla/5.0 (iPad; CPU OS 8_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/45.0.2454.68 Mobile/12F69 Safari/600.1.4",
"Mozilla/5.0 (Android; Tablet; rv:40.0) Gecko/40.0 Firefox/40.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 7_1_2 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) Version/7.0 Mobile/11D257 Safari/9537.53",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10) AppleWebKit/600.2.5 (KHTML, like Gecko) Version/8.0.2 Safari/600.2.5",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_4) AppleWebKit/536.30.1 (KHTML, like Gecko) Version/6.0.5 Safari/536.30.1",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.125 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36",
"Mozilla/5.0 (Linux; U; Android 4.4.3; en-us; KFSAWI Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.104 AOL/9.8 AOLBuild/4346.13.US Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; MAAU; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.152 Safari/537.36",
"Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E)",
"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:35.0) Gecko/20100101 Firefox/35.0",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.132 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.74.9 (KHTML, like Gecko) Version/7.0.2 Safari/537.74.9",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 7_0_2 like Mac OS X) AppleWebKit/537.51.1 (KHTML, like Gecko) Version/7.0 Mobile/11A501 Safari/9537.53",
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; Touch; MAARJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 7_0 like Mac OS X) AppleWebKit/537.51.1 (KHTML, like Gecko) Version/7.0 Mobile/11A465 Safari/9537.53",
"Mozilla/5.0 (Windows NT 10.0; Trident/7.0; rv:11.0) like Gecko",
"Mozilla/5.0 (iPad; CPU OS 8_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/7.0.55539 Mobile/12F69 Safari/600.1.4",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_4) AppleWebKit/537.78.2 (KHTML, like Gecko) Version/7.0.6 Safari/537.78.2",
"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:36.0) Gecko/20100101 Firefox/36.0",
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; MASMJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; WOW64; rv:39.0) Gecko/20100101 Firefox/39.0",
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36 OPR/31.0.1889.174",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; FunWebProducts; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; MAARJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; BOIE9;ENUS; rv:11.0) like Gecko",
"Mozilla/5.0 (Linux; Android 4.4.2; SM-T230NU Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; EIE10;ENUSWOL; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 5.1; rv:39.0) Gecko/20100101 Firefox/39.0",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:39.0) Gecko/20100101 Firefox/39.0",
"Mozilla/5.0 (Linux; U; Android 4.0.4; en-us; KFJWA Build/IMM76D) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36 OPR/31.0.1889.174",
"Mozilla/5.0 (Linux; Android 4.0.4; BNTV600 Build/IMM76L) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.111 Safari/537.36",
"Mozilla/5.0 (iPhone; CPU iPhone OS 8_1_2 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12B440 Safari/600.1.4",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.101 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.118 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; yie9; rv:11.0) like Gecko",
"Mozilla/5.0 (Linux; Android 5.0.2; SM-T530NU Build/LRX22G) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 9_0 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13A4325c Safari/601.1",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36",
"Mozilla/5.0 (iPhone; CPU iPhone OS 8_1_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12B466 Safari/600.1.4",
"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:34.0) Gecko/20100101 Firefox/34.0",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.130 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.89 Safari/537.36",
"Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2; WOW64; Trident/7.0)",
"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:33.0) Gecko/20100101 Firefox/33.0",
"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:39.0) Gecko/20100101 Firefox/39.0",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36",
"Mozilla/5.0 (iPhone; CPU iPhone OS 8_2 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12D508 Safari/600.1.4",
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/44.0.2403.67 Mobile/12H321 Safari/600.1.4",
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.2; WOW64; Trident/7.0; .NET4.0E; .NET4.0C)",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.71 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.134 Safari/537.36",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.152 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.81 Safari/537.36",
"Mozilla/5.0 (PlayStation 4 2.57) AppleWebKit/537.73 (KHTML, like Gecko)",
"Mozilla/5.0 (Windows NT 6.1; rv:31.0) Gecko/20100101 Firefox/31.0",
"Mozilla/5.0 (Linux; Android 5.0; SM-G900V Build/LRX21T) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Mobile Safari/537.36",
"Mozilla/5.0 (X11; Linux x86_64; rv:40.0) Gecko/20100101 Firefox/40.0",
"Mozilla/5.0 (Linux; Android 5.1.1; Nexus 7 Build/LMY48I) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.111 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; Touch; LCJB; rv:11.0) like Gecko",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:38.0) Gecko/20100101 Firefox/38.0",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.124 Safari/537.36",
"Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2; WOW64; Trident/6.0; Touch)",
"Mozilla/5.0 (Linux; Android 5.0.2; SM-T800 Build/LRX22G) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.111 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; Touch; MASMJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.152 Safari/537.36",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.7; rv:39.0) Gecko/20100101 Firefox/39.0",
"Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; TNJB; rv:11.0) like Gecko",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.75.14 (KHTML, like Gecko) Version/7.0.3 Safari/537.75.14",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.89 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; ASJB; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36",
"Mozilla/5.0 (Linux; Android 5.0.1; SAMSUNG SCH-I545 4G Build/LRX22C) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/2.1 Chrome/34.0.1847.76 Mobile Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.115 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36 OPR/31.0.1889.174",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.114 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; EIE10;ENUSMSN; rv:11.0) like Gecko",
"Mozilla/5.0 (iPhone; CPU iPhone OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/45.0.2454.68 Mobile/12H321 Safari/600.1.4",
"Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; MATBJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:30.0) Gecko/20100101 Firefox/30.0",
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; Touch; MASAJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.1; rv:41.0) Gecko/20100101 Firefox/41.0",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; MALC; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.94 AOL/9.7 AOLBuild/4343.4049.US Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; WOW64; rv:41.0) Gecko/20100101 Firefox/41.0",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/33.0.0.0 Safari/534.24",
"Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; MDDCJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36 SE 2.X MetaSr 1.0",
"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET4.0C; .NET4.0E)",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.120 Safari/537.36",
"Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET4.0C; .NET4.0E)",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.134 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:39.0) Gecko/20100101 Firefox/39.0",
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; yie10; rv:11.0) like Gecko",
"Mozilla/5.0 (Linux; Android 5.0; SAMSUNG-SM-G900A Build/LRX21T) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/2.1 Chrome/34.0.1847.76 Mobile Safari/537.36",
"Mozilla/5.0 (Linux; U; Android 4.0.3; en-gb; KFTT Build/IML74K) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36",
"Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2; WOW64; Trident/8.0)",
"Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; TNJB; rv:11.0) like Gecko",
"Mozilla/5.0 (X11; CrOS x86_64 7077.111.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.125 Safari/537.36",
"Mozilla/5.0 (Linux; Android 4.0.4; BNTV400 Build/IMM76L) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.111 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; rv:37.0) Gecko/20100101 Firefox/37.0",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.130 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.152 Safari/537.36 LBBROWSER",
"Mozilla/5.0 (Windows NT 6.3; WOW64; rv:41.0) Gecko/20100101 Firefox/41.0",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.76 Safari/537.36",
"Mozilla/5.0 (Linux; Android 5.0; SAMSUNG SM-G900P Build/LRX21T) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/2.1 Chrome/34.0.1847.76 Mobile Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36",
"Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.104 AOL/9.8 AOLBuild/4346.18.US Safari/537.36",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; InfoPath.3; GWX:QUALIFIED)",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.107 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; MDDCJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.104 AOL/9.8 AOLBuild/4346.13.US Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.94 AOL/9.7 AOLBuild/4343.4043.US Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:23.0) Gecko/20100101 Firefox/23.0",
"Mozilla/5.0 (Windows NT 5.1; rv:38.0) Gecko/20100101 Firefox/38.0",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.13 Safari/537.36",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Ubuntu Chromium/44.0.2403.89 Chrome/44.0.2403.89 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 6_0_1 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10A523 Safari/8536.25",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; MANM; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Maxthon/4.4.6.2000 Chrome/30.0.1599.101 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 8_4 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/8.0.57838 Mobile/12H143 Safari/600.1.4",
"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:32.0) Gecko/20100101 Firefox/32.0",
"Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0; MDDRJS)",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.22 Safari/537.36",
"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:38.0) Gecko/20100101 Firefox/38.0",
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; Touch; MATBJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.93 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:37.0) Gecko/20100101 Firefox/37.0",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.104 AOL/9.8 AOLBuild/4346.13.US Safari/537.36",
"Mozilla/5.0 (Windows NT 5.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.0",
"Mozilla/5.0 (X11; Linux x86_64; U; en-us) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36",
"Mozilla/5.0 (X11; CrOS x86_64 6946.86.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.134 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.130 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.91 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; TNJB; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.152 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; Trident/7.0; MDDRJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.104 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 8_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/8.0.57838 Mobile/12F69 Safari/600.1.4",
"Mozilla/5.0 (iPhone; CPU iPhone OS 7_1_1 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) Version/7.0 Mobile/11D201 Safari/9537.53",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; GIL 3.5; rv:11.0) like Gecko",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:41.0) Gecko/20100101 Firefox/41.0",
"Mozilla/5.0 (Linux; U; Android 4.4.2; en-us; LG-V410/V41010d Build/KOT49I.V41010d) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/30.0.1599.103 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.75.14 (KHTML, like Gecko) Version/7.0.3 Safari/537.75.14",
"Mozilla/5.0 (iPhone; CPU iPhone OS 8_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12B411 Safari/600.1.4",
"Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; MATBJS; rv:11.0) like Gecko",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.134 Safari/537.36",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/534.34 (KHTML, like Gecko) Qt/4.8.1 Safari/534.34",
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; Touch; USPortal; rv:11.0) like Gecko",
"Mozilla/5.0 (iPad; CPU OS 8_4 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Mobile/12H143",
"Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:40.0) Gecko/20100101 Firefox/40.0.2 Waterfox/40.0.2",
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; SMJB; rv:11.0) like Gecko",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; CMDTDF; .NET4.0C; .NET4.0E)",
"Mozilla/5.0 (iPad; CPU OS 6_1_2 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10B146 Safari/8536.25",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.130 Safari/537.36",
"Mozilla/5.0 (MSIE 9.0; Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.124 Safari/537.36",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; Touch; TNJB; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.122 Safari/537.36 SE 2.X MetaSr 1.0",
"Mozilla/5.0 (X11; FC Linux i686; rv:24.0) Gecko/20100101 Firefox/24.0",
"Mozilla/5.0 (X11; CrOS armv7l 7262.52.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.86 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; MASAJS; rv:11.0) like Gecko",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; MS-RTC LM 8; .NET4.0C; .NET4.0E)",
"Mozilla/5.0 (Windows NT 6.1; Trident/7.0; yie11; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.10532",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; BOIE9;ENUSMSE; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.2; WOW64; rv:29.0) Gecko/20100101 Firefox/29.0",
"Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET4.0C; .NET4.0E; InfoPath.3)",
"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:29.0) Gecko/20100101 Firefox/29.0",
"Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; InfoPath.3)",
"Mozilla/5.0 (Linux; Android 4.4.2; SM-T320 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 8_4 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/44.0.2403.67 Mobile/12H143 Safari/600.1.4",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.143 Safari/537.36",
"Mozilla/5.0 (iPhone; CPU iPhone OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/7.0.55539 Mobile/12H321 Safari/600.1.4",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.130 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36",
"Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET4.0C; .NET4.0E; 360SE)",
"Mozilla/5.0 (Linux; Android 5.0.2; LG-V410/V41020c Build/LRX22G) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/34.0.1847.118 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.81 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 7_1_2 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) GSA/7.0.55539 Mobile/11D257 Safari/9537.53",
"Mozilla/5.0 (iPad; CPU OS 8_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Mobile/12F69",
"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.13 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36",
"Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET4.0C; .NET4.0E)",
"Mozilla/5.0 (Linux; U; Android 4.4.3; en-us; KFTHWA Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36",
"Mozilla/5.0 (Android; Mobile; rv:40.0) Gecko/40.0 Firefox/40.0",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36 SE 2.X MetaSr 1.0",
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.94 AOL/9.7 AOLBuild/4343.4043.US Safari/537.36",
"Mozilla/5.0 (Linux; Android 4.4.2; SM-P600 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (X11; Linux x86_64; rv:39.0) Gecko/20100101 Firefox/39.0",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.99 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; rv:35.0) Gecko/20100101 Firefox/35.0",
"Mozilla/5.0 (iPad; CPU OS 6_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10A5355d Safari/8536.25",
"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.22 Safari/537.36",
"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET4.0C; .NET4.0E; 360SE)",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; Touch; LCJB; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.125 Safari/537.36",
"Mozilla/5.0 (X11; CrOS x86_64 6812.88.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.153 Safari/537.36",
"Mozilla/5.0 (X11; Linux i686; rv:38.0) Gecko/20100101 Firefox/38.0",
"Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; Touch; ASU2JS; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.65 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.154 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.13 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/6.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E)",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10) AppleWebKit/537.16 (KHTML, like Gecko) Version/8.0 Safari/537.16",
"Mozilla/5.0 (Windows NT 6.1; rv:34.0) Gecko/20100101 Firefox/34.0",
"Mozilla/5.0 (Linux; Android 5.0; SAMSUNG SM-N900V 4G Build/LRX21V) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/2.1 Chrome/34.0.1847.76 Mobile Safari/537.36",
"Mozilla/5.0 (Linux; Android 4.4.3; KFTHWI Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Silk/44.1.81 like Chrome/44.0.2403.128 Safari/537.36",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; CMDTDF; .NET4.0C; .NET4.0E; GWX:QUALIFIED)",
"Mozilla/5.0 (iPad; CPU OS 7_1_2 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) CriOS/45.0.2454.68 Mobile/11D257 Safari/9537.53",
"Mozilla/5.0 (Windows NT 6.3; WOW64; rv:37.0) Gecko/20100101 Firefox/37.0",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Maxthon/4.4.6.1000 Chrome/30.0.1599.101 Safari/537.36",
"Mozilla/5.0 (Linux; Android 4.4.2; GT-P5210 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.99 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; WOW64; rv:38.0) Gecko/20100101 Firefox/38.0",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; MDDSJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Linux; Android 4.4.2; QTAQZ3 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (Linux; Android 4.4.2; QMV7B Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.130 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; MATBJS; rv:11.0) like Gecko",
"Mozilla/5.0 (iPad; CPU OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/6.0.51363 Mobile/12H321 Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 8_1_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12B436 Safari/600.1.4",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.116 Safari/537.36",
"Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN) AppleWebKit/530.19.2 (KHTML, like Gecko) Version/4.0.2 Safari/530.19.1",
"Mozilla/5.0 (iPhone; CPU iPhone OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Mobile/12H321",
"Mozilla/5.0 (Linux; U; Android 4.0.3; en-ca; KFTT Build/IML74K) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36",
"Mozilla/5.0 (Windows NT 5.1; rv:30.0) Gecko/20100101 Firefox/30.0",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.130 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:40.0) Gecko/20100101 Firefox/40.0.2 Waterfox/40.0.2",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:38.0) Gecko/20100101 Firefox/38.0",
"Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; LCJB; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; NISSC; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.111 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.118 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.71 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9) AppleWebKit/537.71 (KHTML, like Gecko) Version/7.0 Safari/537.71",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.125 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; Trident/7.0; MALC; rv:11.0) like Gecko",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.132 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.0.9895 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; MSBrowserIE; rv:11.0) like Gecko",
"Mozilla/5.0 (Linux; Android 5.0.1; SAMSUNG SM-N910V 4G Build/LRX22C) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/2.1 Chrome/34.0.1847.76 Mobile Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.76 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.2; rv:40.0) Gecko/20100101 Firefox/40.0",
"Mozilla/5.0 (Linux; Android 5.0.2; SAMSUNG SM-T530NU Build/LRX22G) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/3.2 Chrome/38.0.2125.102 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.89 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.65 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.124 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; LCJB; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.0; rv:39.0) Gecko/20100101 Firefox/39.0",
"Mozilla/5.0 (Linux; Android 5.0.2; SM-T700 Build/LRX22G) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (Linux; Android 5.0.1; SAMSUNG-SM-N910A Build/LRX22C) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/2.1 Chrome/34.0.1847.76 Mobile Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; ASU2JS; rv:11.0) like Gecko",
"Mozilla/5.0 (X11; Fedora; Linux x86_64; rv:40.0) Gecko/20100101 Firefox/40.0",
"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:28.0) Gecko/20100101 Firefox/28.0",
"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:29.0) Gecko/20120101 Firefox/29.0",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36",
"Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN; rv:1.9.0.8) Gecko/2009032609 Firefox/3.0.8 (.NET CLR 3.5.30729)",
"Mozilla/5.0 (X11; CrOS x86_64 7077.95.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.90 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Maxthon/4.4.6.1000 Chrome/30.0.1599.101 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.130 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.152 Safari/537.36 LBBROWSER",
"Mozilla/5.0 (Windows NT 6.1; rv:36.0) Gecko/20100101 Firefox/36.0",
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.130 Safari/537.36",
"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/7.0)",
"Mozilla/5.0 (iPad; CPU OS 8_1_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/45.0.2454.68 Mobile/12B466 Safari/600.1.4",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.125 Safari/537.36",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.2; Win64; x64; Trident/6.0; .NET4.0E; .NET4.0C; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 2.0.50727)",
"Mozilla/5.0 (Linux; Android 5.0.2; VK810 4G Build/LRX22G) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.76.4 (KHTML, like Gecko) Version/7.0.4 Safari/537.76.4",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.132 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.11; rv:40.0) Gecko/20100101 Firefox/40.0",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; Touch; SMJB; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; Touch; MDDCJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.131 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; Trident/7.0; BOIE9;ENUS; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36 SE 2.X MetaSr 1.0",
"Mozilla/5.0 (iPad; CPU OS 8_4 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/6.0.51363 Mobile/12H143 Safari/600.1.4",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:38.0) Gecko/20100101 Firefox/38.0",
"Mozilla/5.0 (Windows NT 5.1; rv:41.0) Gecko/20100101 Firefox/41.0",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/6.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; InfoPath.3)",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.76 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2503.0 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11) AppleWebKit/601.1.50 (KHTML, like Gecko) Version/9.0 Safari/601.1.50",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; InfoPath.3; GWX:RESERVED)",
"Mozilla/5.0 (iPad; CPU OS 6_1 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10B141 Safari/8536.25",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/601.1.56 (KHTML, like Gecko) Version/9.0 Safari/601.1.56",
"Mozilla/5.0 (Linux; Android 5.1.1; Nexus 7 Build/LMY47V) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 8_1_2 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/45.0.2454.68 Mobile/12B440 Safari/600.1.4",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.125 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/534+ (KHTML, like Gecko) MsnBot-Media /1.0b",
"Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/7.0)",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36",
"Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.3; WOW64; Trident/7.0)",
"Mozilla/5.0 (Linux; Android 5.1.1; SM-G920V Build/LMY47X) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Mobile Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; ASU2JS; rv:11.0) like Gecko",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.94 AOL/9.7 AOLBuild/4343.4049.US Safari/537.36",
"Mozilla/5.0 (X11; CrOS x86_64 6680.78.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.102 Safari/537.36",
"Mozilla/5.0 (Linux; Android 4.4.2; SM-T520 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.59 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Maxthon/4.4.6.2000 Chrome/30.0.1599.101 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.134 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.111 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; Touch; MAARJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.125 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; MALNJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Linux; Android 4.4.2; SM-T900 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.134 Safari/537.36",
"Mozilla/5.0 (Windows; U; MSIE 9.0; Windows NT 9.0; en-US)",
"Mozilla/5.0 (Windows NT 6.2; WOW64; rv:39.0) Gecko/20100101 Firefox/39.0",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.94 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 8_2 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/45.0.2454.68 Mobile/12D508 Safari/600.1.4",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:36.0) Gecko/20100101 Firefox/36.0",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2503.0 Safari/537.36",
"Mozilla/5.0 (Linux; Android 4.1.2; GT-N8013 Build/JZO54K) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (Linux; U; Android 4.4.3; en-us; KFAPWA Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.134 Safari/537.36",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; InfoPath.3)",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.7; rv:38.0) Gecko/20100101 Firefox/38.0",
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; MALCJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.1; rv:30.0) Gecko/20100101 Firefox/30.0",
"Mozilla/5.0 (Linux; Android 5.0.1; SM-N910V Build/LRX22C) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Mobile Safari/537.36",
"Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1667.0 Safari/537.36",
"Mozilla/5.0 (iPhone; CPU iPhone OS 8_1_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12B436 Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 8_1_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/7.0.55539 Mobile/12B466 Safari/600.1.4",
"Mozilla/5.0 (iPhone; CPU iPhone OS 8_0_2 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12A405 Safari/600.1.4",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.59 Safari/537.36",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E)",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:37.0) Gecko/20100101 Firefox/37.0",
"Mozilla/5.0 (Linux; Android 4.4.2; SM-T310 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.45 Safari/537.36",
"Mozilla/5.0 (Linux; Android 5.1.1; Nexus 10 Build/LMY48I) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.115 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; Touch; TNJB; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.107 Safari/537.36",
"Mozilla/5.0 (X11; CrOS x86_64 7077.123.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36",
"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET4.0C; .NET4.0E)",
"Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET4.0C; .NET4.0E; 360SE)",
"Mozilla/5.0 (Linux; Android 4.4.2; QMV7A Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (iPhone; CPU iPhone OS 7_0_4 like Mac OS X) AppleWebKit/537.51.1 (KHTML, like Gecko) Version/7.0 Mobile/11B554a Safari/9537.53",
"Mozilla/5.0 (Windows NT 6.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36",
"Mozilla/5.0 (Linux; Android 5.0; SAMSUNG-SM-N900A Build/LRX21V) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/2.1 Chrome/34.0.1847.76 Mobile Safari/537.36",
"Mozilla/5.0 (Linux; Android 4.4.4; XT1080 Build/SU6-7.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Mobile Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; MAARJS; rv:11.0) like Gecko",
"Mozilla/5.0 (iPad; CPU OS 8_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/6.0.51363 Mobile/12F69 Safari/600.1.4",
"Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; MALNJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Maxthon/4.4.6.2000 Chrome/30.0.1599.101 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; Touch; ASJB; rv:11.0) like Gecko",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.73.11 (KHTML, like Gecko) Version/7.0.1 Safari/537.73.11",
"Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2; WOW64; Trident/7.0; TNJB; 1ButtonTaskbar)",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36",
"Mozilla/5.0 (Windows Phone 8.1; ARM; Trident/7.0; Touch; rv:11.0; IEMobile/11.0; NOKIA; Lumia 635) like Gecko",
"Mozilla/5.0 (iPad; CPU OS 5_0_1 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko) Version/5.1 Mobile/9A405 Safari/7534.48.3",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:35.0) Gecko/20100101 Firefox/35.0",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.101 Safari/537.36",
"Mozilla/5.0 (Linux; Android 5.1.1; SAMSUNG SM-N910P Build/LMY47X) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/2.1 Chrome/34.0.1847.76 Mobile Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; rv:33.0) Gecko/20100101 Firefox/33.0",
"Mozilla/5.0 (iPad; CPU OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Mobile/12H321 [Pinterest/iOS]",
"Mozilla/5.0 (Linux; Android 5.0.1; LGLK430 Build/LRX21Y) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/38.0.2125.102 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Mobile/12H321 Safari",
"Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2; WOW64; Trident/8.0; 1ButtonTaskbar)",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.134 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; NP08; NP08; MAAU; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 5.1; rv:37.0) Gecko/20100101 Firefox/37.0",
"Mozilla/5.0 (Linux; Android 4.4.2; SM-T217S Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; EIE10;ENUSMSE; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.2; WOW64; rv:33.0) Gecko/20100101 Firefox/33.0",
"Mozilla/5.0 (Windows NT 5.1; rv:35.0) Gecko/20100101 Firefox/35.0",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.111 Safari/537.36",
"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:37.0) Gecko/20100101 Firefox/37.0",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.76 Safari/537.36",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.152 Safari/537.36 LBBROWSER",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36",
"Mozilla/5.0 (Linux; Android 5.1; XT1254 Build/SU3TL-39) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Mobile Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.13 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.2; Win64; x64; Trident/7.0; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.124 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 8_1_2 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/7.0.55539 Mobile/12B440 Safari/600.1.4",
"Mozilla/5.0 (MSIE 10.0; Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko",
"Mozilla/5.0 (iPad; CPU OS 8_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/44.0.2403.67 Mobile/12F69 Safari/600.1.4",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36 OPR/31.0.1889.174",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.124 Safari/537.36",
"Mozilla/5.0 (Linux; Android 5.0.1; SAMSUNG-SGH-I337 Build/LRX22C) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/2.1 Chrome/34.0.1847.76 Mobile Safari/537.36",
"Mozilla/5.0 (Linux; Android 4.4.3; KFASWI Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Silk/44.1.81 like Chrome/44.0.2403.128 Safari/537.36",
"Mozilla/5.0 (X11; CrOS armv7l 7077.111.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.125 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.67 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 6_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10A403 Safari/8536.25",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.114 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; WOW64; rv:36.0) Gecko/20100101 Firefox/36.0",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.134 Safari/537.36",
"Mozilla/5.0 (Linux; Android 5.0.2; SAMSUNG SM-T800 Build/LRX22G) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/3.0 Chrome/38.0.2125.102 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.125 Safari/537.36",
"Mozilla/5.0 (Linux; Android 5.0; SM-G900V Build/LRX21T) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.133 Mobile Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; MAGWJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.122 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; Touch; MALNJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.130 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64; Trident/7.0; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.1; Trident/7.0; ATT-IE11; rv:11.0) like Gecko",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.130 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.103 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36 OPR/31.0.1889.174",
"Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36 SE 2.X MetaSr 1.0",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7) AppleWebKit/534.48.3 (KHTML, like Gecko) Version/5.1 Safari/534.48.3",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.2; WOW64; Trident/7.0; .NET4.0E; .NET4.0C; .NET CLR 3.5.30729; .NET CLR 2.0.50727; .NET CLR 3.0.30729)",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.13 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.114 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; rv:32.0) Gecko/20100101 Firefox/32.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/8.0.57838 Mobile/12H321 Safari/600.1.4",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 8_2 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/7.0.55539 Mobile/12D508 Safari/600.1.4",
"Mozilla/5.0 (iPhone; CPU iPhone OS 7_1 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) Version/7.0 Mobile/11D167 Safari/9537.53",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0; MSN 9.0;MSN 9.1;MSN 9.6;MSN 10.0;MSN 10.2;MSN 10.5;MSN 11;MSN 11.5; MSNbMSNI; MSNmen-us; MSNcOTH) like Gecko",
"Mozilla/5.0 (Windows NT 5.1; rv:36.0) Gecko/20100101 Firefox/36.0",
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.0.9895 Safari/537.36",
"Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2; WOW64; Trident/7.0; 1ButtonTaskbar)",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.102 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.124 YaBrowser/15.7.2357.2877 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:27.0) Gecko/20100101 Firefox/27.0",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; BOIE9;ENUSMSNIP; rv:11.0) like Gecko",
"Mozilla/5.0 AppleWebKit/999.0 (KHTML, like Gecko) Chrome/99.0 Safari/999.0",
"Mozilla/5.0 (X11; OpenBSD amd64; rv:28.0) Gecko/20100101 Firefox/28.0",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Trident/6.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E)",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/538.1 (KHTML, like Gecko) PhantomJS/2.0.0 Safari/538.1",
"Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; MAGWJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Linux; Android 4.4.2; GT-N5110 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.71 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 8_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/45.0.2454.68 Mobile/12B410 Safari/600.1.4",
"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:25.7) Gecko/20150824 Firefox/31.9 PaleMoon/25.7.0",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:31.0) Gecko/20100101 Firefox/31.0",
"Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:39.0) Gecko/20100101 Firefox/39.0",
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.63 Safari/537.36",
"Mozilla/5.0 (iPhone; CPU iPhone OS 9_0 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13A4325c Safari/601.1",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36",
"Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET4.0C; .NET4.0E; MS-RTC LM 8; InfoPath.3)",
"Mozilla/5.0 (Linux; Android 4.4.2; RCT6203W46 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/30.0.0.0 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.101 Safari/537.36",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:31.0) Gecko/20100101 Firefox/31.0",
"Mozilla/5.0 (Windows NT 6.3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.122 Safari/537.36",
"Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; Tablet PC 2.0)",
"Mozilla/5.0 (Windows NT 6.1; Trident/7.0; EIE10;ENUSWOL; rv:11.0) like Gecko",
"Mozilla/5.0 (Linux; Android 4.4.4; en-us; SAMSUNG SM-N910T Build/KTU84P) AppleWebKit/537.36 (KHTML, like Gecko) Version/2.0 Chrome/34.0.1847.76 Mobile Safari/537.36",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36",
"Mozilla/5.0 (Linux; Android 4.4.2; RCT6203W46 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (Linux; U; Android 4.0.4; en-ca; KFJWI Build/IMM76D) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Ubuntu Chromium/34.0.1847.116 Chrome/34.0.1847.116 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.22 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.137 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.45 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.101 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; rv:27.0) Gecko/20100101 Firefox/27.0",
"Mozilla/5.0 (Linux; Android 4.4.2; RCT6773W22 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; ASJB; ASJB; MAAU; rv:11.0) like Gecko",
"Mozilla/5.0 (iPad; U; CPU OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B367 Safari/531.21.10",
"Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.7) Gecko/20150824 Firefox/31.9 PaleMoon/25.7.0",
"Mozilla/5.0 (Linux; Android 5.0; SAMSUNG-SM-G870A Build/LRX21T) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/2.1 Chrome/34.0.1847.76 Mobile Safari/537.36",
"Mozilla/5.0 (Linux; Android 4.4.3; KFSOWI Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Silk/44.1.81 like Chrome/44.0.2403.128 Safari/537.36",
"Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; InfoPath.2)",
"Mozilla/5.0 (Windows NT 5.2; rv:40.0) Gecko/20100101 Firefox/40.0",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.0.9895 Safari/537.36",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.94 AOL/9.7 AOLBuild/4343.4049.US Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; EIE10;ENUSMCM; rv:11.0) like Gecko",
"Mozilla/5.0 (Linux; Android 5.1.1; SAMSUNG SM-G920P Build/LMY47X) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/3.2 Chrome/38.0.2125.102 Mobile Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.107 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.93 Safari/537.36",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/600.8.9 (KHTML, like Gecko)",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:35.0) Gecko/20100101 Firefox/35.0",
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; Touch; MALCJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.134 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.130 Safari/537.36",
"Mozilla/5.0 (Windows NT 5.2; rv:29.0) Gecko/20100101 Firefox/29.0 /29.0",
"Mozilla/5.0 (Linux; Android 5.0.2; SM-T550 Build/LRX22G) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.94 AOL/9.7 AOLBuild/4343.4049.US Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.122 Safari/537.36 SE 2.X MetaSr 1.0",
"Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET4.0C; .NET4.0E)",
"Mozilla/5.0 (Linux; U; Android 4.0.3; en-gb; KFOT Build/IML74K) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36",
"Mozilla/5.0 (Linux; Android 5.0.2; SM-P900 Build/LRX22G) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (Linux; Android 5.1.1; Nexus 9 Build/LMY48I) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (Linux; Android 4.4.2; SM-T530NU Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (X11; Linux i686; rv:40.0) Gecko/20100101 Firefox/40.0",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.143 Safari/537.36",
"Mozilla/5.0 (Linux; Android 5.1.1; SM-T330NU Build/LMY47X) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Maxthon/4.4.7.1000 Chrome/30.0.1599.101 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:34.0) Gecko/20100101 Firefox/34.0",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.124 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; WOW64; rv:35.0) Gecko/20100101 Firefox/35.0",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.104 Safari/537.36",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36",
"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:35.0) Gecko/20100101 Firefox/35.0",
"Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.22 Safari/537.36",
"Mozilla/5.0 (Windows; U; Windows NT 6.1; zh-CN) AppleWebKit/530.19.2 (KHTML, like Gecko) Version/4.0.2 Safari/530.19.1",
"Mozilla/5.0 (Android; Tablet; rv:34.0) Gecko/34.0 Firefox/34.0",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; MALCJS; rv:11.0) like Gecko",
"Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E)",
"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET4.0C; .NET4.0E)",
"Mozilla/5.0 (iPad; CPU OS 7_1_2 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) GSA/8.0.57838 Mobile/11D257 Safari/9537.53",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.146 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; yie10; rv:11.0) like Gecko",
"Mozilla/5.0 (Linux; Ubuntu 14.04) AppleWebKit/537.36 Chromium/35.0.1870.2 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; yie11; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.63 Safari/537.36 SE 2.X MetaSr 1.0",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.118 Safari/537.36",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.122 Safari/537.36 SE 2.X MetaSr 1.0",
"Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2; WOW64; Trident/8.0; TNJB; 1ButtonTaskbar)",
"Mozilla/5.0 (Linux; Android 4.4.2; RCT6773W22 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/30.0.0.0 Safari/537.36",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET4.0C; .NET4.0E)",
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2503.0 Safari/537.36",
"Mozilla/5.0 (Linux; Android 5.0; SAMSUNG-SM-G900A Build/LRX21T) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Mobile Safari/537.36",
"Mozilla/5.0 (Windows; U; Windows NT 6.1; zh-CN; rv:1.9.0.8) Gecko/2009032609 Firefox/3.0.8 (.NET CLR 3.5.30729)",
"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E)",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.65 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Maxthon/4.4.7.1000 Chrome/30.0.1599.101 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; NP08; rv:11.0) like Gecko",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.130 Safari/537.36",
"Mozilla/5.0 (Linux; Android 4.4.2; SM-T210R Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; Win64; x64; rv:40.0) Gecko/20100101 Firefox/40.0.2 Waterfox/40.0.2",
"Mozilla/5.0 (Linux; Android 5.0; SAMSUNG SM-N900P Build/LRX21V) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/2.1 Chrome/34.0.1847.76 Mobile Safari/537.36",
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.104 AOL/9.8 AOLBuild/4346.18.US Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.22 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.130 Safari/537.36",
"Mozilla/5.0 (Linux; Android 5.0.2; SM-T350 Build/LRX22G) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; Touch; ASU2JS; rv:11.0) like Gecko",
"Mozilla/5.0 (Linux; Android 5.0.2; SM-T530NU Build/LRX22G) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.133 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.154 Safari/537.36",
"Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/7.0; 1ButtonTaskbar)",
"Mozilla/5.0 (Linux; Android 5.0.2; SAMSUNG-SM-G920A Build/LRX22G) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/3.0 Chrome/38.0.2125.102 Mobile Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2503.0 Safari/537.36",
"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET4.0C; .NET4.0E; 360SE)",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; MAAU; MAAU; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.132 Safari/537.36",
"Mozilla/5.0 (X11; Linux x86_64; rv:38.0) Gecko/20100101 Firefox/38.0 Iceweasel/38.2.1",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; MANM; MANM; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:37.0) Gecko/20100101 Firefox/37.0",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/534+ (KHTML, like Gecko) BingPreview/1.0b",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.93 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.81 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.94 AOL/9.7 AOLBuild/4343.4049.US Safari/537.36",
"Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:38.0) Gecko/20100101 Firefox/38.0",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.104 Safari/537.36",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36",
"Mozilla/5.0 (Linux; Android 4.4.2; QTAQZ3 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.135 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Mobile/12H321 OverDrive Media Console/3.3.1",
"Mozilla/5.0 (iPad; CPU OS 7_1_2 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) Mobile/11D257",
"Mozilla/5.0 (iPad; CPU OS 7_1_1 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) GSA/7.0.55539 Mobile/11D201 Safari/9537.53",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.130 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.130 Safari/537.36",
"Mozilla/5.0 (Linux; Android 5.0.1; SCH-I545 Build/LRX22C) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Mobile Safari/537.36",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.130 Safari/537.36",
"Mozilla/5.0 (Linux; Android 5.0; SM-G900P Build/LRX21T) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Mobile Safari/537.36",
"Mozilla/5.0 (iPhone; CPU iPhone OS 8_0 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12A365 Safari/600.1.4",
"Mozilla/5.0 (Windows NT 5.1; rv:34.0) Gecko/20100101 Firefox/34.0",
"Mozilla/5.0 (Windows NT 6.3; WOW64; rv:31.0) Gecko/20100101 Firefox/31.0",
"Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; Touch; MDDCJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.132 Safari/537.36",
"Mozilla/5.0 (iPad;U;CPU OS 5_1_1 like Mac OS X; zh-cn)AppleWebKit/534.46.0(KHTML, like Gecko)CriOS/19.0.1084.60 Mobile/9B206 Safari/7534.48.3",
"Mozilla/5.0 (Linux; Android 4.4.3; KFAPWI Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Silk/44.1.81 like Chrome/44.0.2403.128 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 7_1_1 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) CriOS/45.0.2454.68 Mobile/11D201 Safari/9537.53",
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.118 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.124 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/43.0.2357.61 Mobile/12H321 Safari/600.1.4",
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; MAMIJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.120 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.120 Safari/537.36",
"Mozilla/5.0 (Linux; Android 5.0.1; VS985 4G Build/LRX21Y) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Mobile Safari/537.36",
"Mozilla/5.0 (Windows NT 5.1; rv:33.0) Gecko/20100101 Firefox/33.0",
"Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (iPhone; CPU iPhone OS 8_4 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/45.0.2454.68 Mobile/12H143 Safari/600.1.4",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.0; WOW64; rv:39.0) Gecko/20100101 Firefox/39.0",
"Mozilla/5.0 (Linux; Android 5.0.2; LG-V410/V41020b Build/LRX22G) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/34.0.1847.118 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2503.0 Safari/537.36",
"Mozilla/5.0 (X11; Fedora; Linux x86_64; rv:38.0) Gecko/20100101 Firefox/38.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 8_1_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12B435 Safari/600.1.4",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.130 Safari/537.36",
"Mozilla/5.0 (X11; Linux x86_64; rv:28.0) Gecko/20100101 Firefox/28.0",
"Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:36.0) Gecko/20100101 Firefox/36.0",
"Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; InfoPath.3; .NET4.0C; .NET4.0E)",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (Windows NT 5.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.115 Safari/537.36",
"Mozilla/5.0 (Windows NT 5.2; WOW64; rv:40.0) Gecko/20100101 Firefox/40.0",
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; MDDRJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Maxthon/4.4.6.2000 Chrome/30.0.1599.101 Safari/537.36",
"Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.3; WOW64; Trident/6.0)",
"Mozilla/5.0 (Linux; Android 5.1.1; SAMSUNG SM-G920T Build/LMY47X) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/3.2 Chrome/38.0.2125.102 Mobile Safari/537.36",
"Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; InfoPath.3; MS-RTC LM 8)",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2503.0 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.91 Safari/537.36",
"Mozilla/5.0 (Linux; Android 4.4.3; KFTHWI Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/34.0.0.0 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36",
"Mozilla/5.0 (Linux; Android 4.4.3; KFSAWI Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Silk/44.1.81 like Chrome/44.0.2403.128 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.132 Safari/537.36",
"Mozilla/5.0 (Windows NT 5.1; rv:32.0) Gecko/20100101 Firefox/32.0",
"Mozilla/5.0 (Linux; Android 4.4.2; SM-T230NU Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.133 Safari/537.36",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36",
"Mozilla/5.0 (Linux; Android 4.2.2; SM-T110 Build/JDQ39) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (Linux; Android 5.0.1; SAMSUNG SM-N910T Build/LRX22C) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/2.1 Chrome/34.0.1847.76 Mobile Safari/537.36",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET4.0C; .NET4.0E)",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.125 Safari/537.36",
"Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Win64; x64; Trident/7.0)",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:33.0) Gecko/20100101 Firefox/33.0",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.154 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.99 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.2; WOW64; rv:38.0) Gecko/20100101 Firefox/38.0",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.89 Safari/537.36",
"Mozilla/5.0 (X11; CrOS armv7l 6946.86.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.134 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.94 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:38.0) Gecko/20100101 Firefox/38.0 SeaMonkey/2.35",
"Mozilla/5.0 (Linux; Android 4.4.2; SM-T330NU Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 6_0_1 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10A8426 Safari/8536.25",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.130 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36",
"Mozilla/5.0 (Linux; Android 5.0.2; LG-V410 Build/LRX22G) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.63 Safari/537.36 TheWorld 6",
"Mozilla/5.0 (iPad; CPU OS 8_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/7.0.55539 Mobile/12B410 Safari/600.1.4",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.132 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.107 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/600.2.5 (KHTML, like Gecko) Version/8.0 Safari/600.1.25",
"Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0; EIE10;ENUSWOL)",
"Mozilla/5.0 (iPad; CPU OS 8_4 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/43.0.2357.61 Mobile/12H143 Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 8_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/43.0.2357.61 Mobile/12F69 Safari/600.1.4",
"Mozilla/5.0 (Linux; Android 4.4.2; SM-T237P Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.152 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; Trident/7.0; ATT; rv:11.0) like Gecko",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36",
"Mozilla/5.0 (Linux; Android 5.0.2; SM-T800 Build/LRX22G) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.133 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; Trident/7.0; EIE10;ENUSMSN; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; Touch; MATBJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.107 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.125 Safari/537.36",
"Mozilla/5.0 (Linux; U; Android 4.4.2; en-us; LGMS323 Build/KOT49I.MS32310c) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/30.0.1599.103 Mobile Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.81 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.101 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; EIE11;ENUSMSN; rv:11.0) like Gecko",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.125 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Maxthon/4.4.6.1000 Chrome/30.0.1599.101 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; rv:29.0) Gecko/20100101 Firefox/29.0",
"Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.114 Safari/537.36 Puffin/4.5.0IT",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.131 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; yie8; rv:11.0) like Gecko",
"Mozilla/5.0 (Linux; U; Android 4.4.3; en-gb; KFTHWI Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; Trident/7.0; FunWebProducts; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2505.0 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; Touch; MALNJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; BOIE9;ENUSSEM; rv:11.0) like Gecko",
"Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2; Win64; x64; Trident/6.0; Touch; WebView/1.0)",
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.130 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 5_1 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko) Version/5.1 Mobile/9B176 Safari/7534.48.3",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:33.0) Gecko/20100101 Firefox/33.0",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36",
"Mozilla/5.0 (Linux; Android 5.0.1; SAMSUNG SPH-L720 Build/LRX22C) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/2.1 Chrome/34.0.1847.76 Mobile Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; Trident/7.0; yie9; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.143 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.89 Safari/537.36",
"Mozilla/5.0 (Linux; U; Android 4.4.3; en-us; KFSAWA Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; WOW64; rv:37.0) Gecko/20100101 Firefox/37.0",
"Mozilla/5.0 (compatible; Windows NT 6.1; Catchpoint) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.81 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/29.0",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:32.0) Gecko/20100101 Firefox/32.0",
"Mozilla/5.0 (Windows NT 6.0; rv:38.0) Gecko/20100101 Firefox/38.0",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.118 Safari/537.36",
"Mozilla/5.0 (Linux; Android 4.4.4; Z970 Build/KTU84P) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/30.0.0.0 Mobile Safari/537.36",
"Mozilla/5.0 (Linux; Android 5.1.1; Nexus 5 Build/LMY48I) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Mobile Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/534.55.3 (KHTML, like Gecko) Version/5.1.3 Safari/534.53.10",
"Mozilla/5.0 (X11; CrOS armv7l 6812.88.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.153 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.1 Safari/537.36",
"Mozilla/5.0 (iPhone; CPU iPhone OS 6_1_3 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10B329 Safari/8536.25",
"Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; MAARJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:36.0) Gecko/20100101 Firefox/36.0",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; WOW64; rv:34.0) Gecko/20100101 Firefox/34.0",
"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; )",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.132 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; Touch; MASAJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; Touch; MAARJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.101 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.101 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/537.13+ (KHTML, like Gecko) Version/5.1.7 Safari/534.57.2",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10; rv:33.0) Gecko/20100101 Firefox/33.0",
"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.134 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.63 Safari/537.36 SE 2.X MetaSr 1.0",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 BIDUBrowser/7.6 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; MASMJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 10.0; Trident/7.0; Touch; rv:11.0) like Gecko",
"Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET4.0C; .NET4.0E; 360SE)",
"Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; InfoPath.3; .NET4.0C; .NET4.0E; MS-RTC LM 8)",
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; Touch; MAGWJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Linux; Android 5.1.1; SAMSUNG SM-G925T Build/LMY47X) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/3.2 Chrome/38.0.2125.102 Mobile Safari/537.36",
"Mozilla/5.0 (X11; CrOS x86_64 6457.107.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.115 Safari/537.36",
"Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; 360SE)",
"Mozilla/5.0 (iPad; CPU OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4.17.9 (KHTML, like Gecko) Version/5.1 Mobile/9B206 Safari/7534.48.3",
"Mozilla/5.0 (Linux; Android 4.2.2; GT-P5113 Build/JDQ39) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (X11; Linux i686; rv:24.0) Gecko/20100101 Firefox/24.0 DejaClick/2.5.0.11",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.154 Safari/537.36 LBBROWSER",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.122 Safari/537.36 SE 2.X MetaSr 1.0",
"Mozilla/5.0 (Linux; Android 4.4.3; KFARWI Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Silk/44.1.81 like Chrome/44.0.2403.128 Safari/537.36",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.125 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.117 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 8_1_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/8.0.57838 Mobile/12B466 Safari/600.1.4",
"Mozilla/5.0 (Unknown; Linux i686) AppleWebKit/534.34 (KHTML, like Gecko) Chrome/20.0.1132.57 Safari/534.34",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; NP08; MAAU; NP08; rv:11.0) like Gecko",
"Mozilla/5.0 (Linux; Android 4.4.2; LG-V410 Build/KOT49I.V41010d) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.122 Safari/537.36 SE 2.X MetaSr 1.0",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; InfoPath.3)",
"Mozilla/5.0 (Windows NT 6.1; rv:28.0) Gecko/20100101 Firefox/28.0",
"Mozilla/5.0 (X11; CrOS x86_64 6946.70.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.132 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.124 Safari/537.36",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/6.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; InfoPath.3; .NET4.0C; .NET4.0E)",
"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:33.0) Gecko/20100101 Firefox/33.0",
"Mozilla/5.0 (iPod touch; CPU iPhone OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12H321 Safari/600.1.4",
"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:38.0) Gecko/20100101 IceDragon/38.0.5 Firefox/38.0.5",
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.152 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; managedpc; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.116 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; Touch; MASMJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.111 Safari/537.36",
"Mozilla/5.0 (Linux; U; Android 4.0.3; en-ca; KFOT Build/IML74K) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36",
"Mozilla/5.0 (Linux; Android 4.2.2; Le Pan TC802A Build/JDQ39) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 7_1_2 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) GSA/6.0.51363 Mobile/11D257 Safari/9537.53",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.152 Safari/537.36 LBBROWSER",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:37.0) Gecko/20100101 Firefox/37.0",
"Mozilla/5.0 (Windows NT 6.2; ARM; Trident/7.0; Touch; rv:11.0; WPDesktop; Lumia 1520) like Gecko",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.65 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:42.0) Gecko/20100101 Firefox/42.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 7_0_6 like Mac OS X) AppleWebKit/537.51.1 (KHTML, like Gecko) Version/7.0 Mobile/11B651 Safari/9537.53",
"Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; InfoPath.2; .NET4.0C; .NET4.0E)",
"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET4.0C; .NET4.0E; 360SE)",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.103 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.125 Safari/537.36",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; InfoPath.3; .NET4.0C; .NET4.0E)",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:34.0) Gecko/20100101 Firefox/34.0",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.76 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.87 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; PRU_IE; rv:11.0) like Gecko",
"Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.36 (KHTML, like Gecko) Ubuntu Chromium/37.0.2062.120 Chrome/37.0.2062.120 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Mobile/12H321 [FBAN/FBIOS;FBAV/38.0.0.6.79;FBBV/14316658;FBDV/iPad4,1;FBMD/iPad;FBSN/iPhone OS;FBSV/8.4.1;FBSS/2; FBCR/;FBID/tablet;FBLC/en_US;FBOP/1]",
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36 OPR/31.0.1889.174",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; NP02; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.111 Safari/537.36",
"Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Win64; x64; Trident/4.0; .NET CLR 2.0.50727; SLCC2; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E)",
"Mozilla/5.0 (X11; CrOS x86_64 6946.63.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.130 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.7; rv:37.0) Gecko/20100101 Firefox/37.0",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.115 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.0.9895 Safari/537.36",
"Mozilla/5.0 (Linux; Android 4.4.4; Nexus 7 Build/KTU84P) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36",
"Mozilla/5.0 (Linux; Android 4.2.2; QMV7B Build/JDQ39) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.114 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; Touch; MASMJS; rv:11.0) like Gecko",
"Mozilla/5.0 (compatible; MSIE 10.0; AOL 9.7; AOLBuild 4343.1028; Windows NT 6.1; WOW64; Trident/7.0)",
"Mozilla/5.0 (Linux; U; Android 4.0.3; en-us) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.59 Mobile Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; Trident/7.0; Touch; TNJB; rv:11.0) like Gecko",
"Mozilla/5.0 (iPad; CPU OS 8_1_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Mobile/12B466",
"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; Active Content Browser)",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET4.0C; .NET4.0E)",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.130 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.124 Safari/537.36",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Trident/6.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; InfoPath.3)",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.81 Safari/537.36",
"Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2; Win64; x64; Trident/6.0; WebView/1.0)",
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.89 Safari/537.36",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.130 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.91 Safari/537.36",
"Mozilla/5.0 (iPad; U; CPU OS 5_0 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko) Version/5.1 Mobile/9A334 Safari/7534.48.3",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.130 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) coc_coc_browser/50.0.125 Chrome/44.0.2403.125 Safari/537.36",
"Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0; SLCC2; .NET CLR 2.0.50727; .NET4.0C; .NET4.0E)",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.124 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; MAARJS; rv:11.0) like Gecko",
"Mozilla/5.0 (Linux; Android 5.0; SAMSUNG SM-N900T Build/LRX21V) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/2.1 Chrome/34.0.1847.76 Mobile Safari/537.36",
"Mozilla/5.0 (iPhone; CPU iPhone OS 8_4 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/7.0.55539 Mobile/12H143 Safari/600.1.4",
]
def rand_agent() -> str:
    """Pick one User-Agent string uniformly at random from USER_AGENTS.

    Returns:
        str: User-Agent string
    """
    index = random.randrange(len(USER_AGENTS))
    return USER_AGENTS[index]
def remove_duplicates(string: str) -> str:
    """Remove duplicated words in a string, keeping first-seen order.

    Args:
        string (str): Names of items from income statement, balance sheet and cash flow Dataframes

    Returns:
        str: String without the duplicates

    Notes:
        Dicts preserve insertion order on Python 3.7+, so ``dict.fromkeys``
        deduplicates in O(n) while keeping the first occurrence of each
        word — replacing the previous O(n^2) ``sorted(set(...), key=words.index)``
        approach.
    """
    return " ".join(dict.fromkeys(string.split()))
def get_request(url: str) -> requests.models.Response:
    """Issue a GET request for *url* with a randomized User-Agent header.

    Args:
        url (str): Resource URL

    Returns:
        requests.models.Response: JSON or HTML Response
    """
    return requests.get(url, headers={"User-Agent": rand_agent()})
def check_company_existence(func):
    """
    Decorator that validates a company ticker before calling *func*.

    Should be used with functions that take a company ticker as **first**
    positional argument.  The ticker is upper-cased before the lookup, so
    callers may pass it in any case.

    Raises:
        Exception: The exception is raised when the company is not found in
            the COMPANIES dict (module-level mapping of available tickers).
    """

    @wraps(func)
    def wrapper(*args, **kwargs):
        # NOTE(review): assumes the ticker is passed positionally; a
        # keyword-only call (company=...) would raise IndexError here — confirm
        # call sites before relying on keyword usage.
        company = args[0]
        if not isinstance(company, str) or company.upper() not in COMPANIES.keys():
            raise Exception(
                f"Ticker {company} is not found, use get_tickers() to get a list of available tickers"
            )
        return func(*args, **kwargs)

    return wrapper
| 107.913808
| 244
| 0.687925
| 23,892
| 122,698
| 3.513142
| 0.034363
| 0.023947
| 0.102507
| 0.122844
| 0.938596
| 0.93638
| 0.931829
| 0.929375
| 0.919236
| 0.907596
| 0
| 0.22346
| 0.141005
| 122,698
| 1,136
| 245
| 108.008803
| 0.57292
| 0.005558
| 0
| 0
| 0
| 0.913998
| 0.917959
| 0.005455
| 0
| 0
| 0
| 0
| 0
| 1
| 0.004575
| false
| 0
| 0.002745
| 0
| 0.011894
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
62969750df6302f572065c986fbe93eb6d15b774
| 4,654
|
py
|
Python
|
linkml_runtime/utils/dataclass_extensions_376.py
|
joeflack4/linkml-runtime
|
58443ff90da419404a77386e7aa790ece4b12354
|
[
"CC0-1.0"
] | 1
|
2021-10-04T18:22:12.000Z
|
2021-10-04T18:22:12.000Z
|
linkml_runtime/utils/dataclass_extensions_376.py
|
joeflack4/linkml-runtime
|
58443ff90da419404a77386e7aa790ece4b12354
|
[
"CC0-1.0"
] | 10
|
2021-06-16T20:48:32.000Z
|
2021-10-04T18:22:02.000Z
|
linkml_runtime/utils/dataclass_extensions_376.py
|
joeflack4/linkml-runtime
|
58443ff90da419404a77386e7aa790ece4b12354
|
[
"CC0-1.0"
] | null | null | null |
"""Patched ``__init__`` generator for dataclasses that forwards **kwargs.

This module adapts the stdlib's private ``dataclasses._init_fn`` so that the
generated ``__init__`` also accepts arbitrary ``**kwargs`` and passes them on
to ``__post_init__``.  It relies on private dataclasses internals whose
calling conventions changed in CPython 3.7.6, hence the version split below.
"""
import sys

if sys.version_info < (3, 7, 0):
    # dataclasses internals used here only exist from 3.7 onward.
    raise NotImplementedError("LinkML requires Python 3.7 or later to run")
elif sys.version_info >= (3, 7, 6):
    # 3.7.6+ layout: _field_init consumes a ``locals`` mapping and _create_fn
    # takes an explicit ``globals`` argument.
    from dataclasses import MISSING, _HAS_DEFAULT_FACTORY, _POST_INIT_NAME, _FIELD_INITVAR, _init_param, _field_init, _create_fn

    def dataclasses_init_fn_with_kwargs(fields, frozen, has_post_init, self_name, globals):
        """Build a dataclass ``__init__`` that also accepts ``**kwargs``.

        Mirrors ``dataclasses._init_fn`` (3.7.6+); the only intended behavior
        change is the trailing ``**kwargs`` parameter, forwarded to
        ``__post_init__``.
        """
        # fields contains both real fields and InitVar pseudo-fields.

        # Make sure we don't have fields without defaults following fields
        # with defaults. This actually would be caught when exec-ing the
        # function source code, but catching it here gives a better error
        # message, and future-proofs us in case we build up the function
        # using ast.
        seen_default = False
        for f in fields:
            # Only consider fields in the __init__ call.
            if f.init:
                if not (f.default is MISSING and f.default_factory is MISSING):
                    seen_default = True
                elif seen_default:
                    raise TypeError(f'non-default argument {f.name!r} '
                                    'follows default argument')

        # Names needed while exec-ing the generated function body: one
        # ``_type_<name>`` entry per field plus the two sentinels.
        locals = {f'_type_{f.name}': f.type for f in fields}
        locals.update({
            'MISSING': MISSING,
            '_HAS_DEFAULT_FACTORY': _HAS_DEFAULT_FACTORY,
        })

        body_lines = []
        for f in fields:
            line = _field_init(f, frozen, locals, self_name)
            # line is None means that this field doesn't require
            # initialization (it's a pseudo-field). Just skip it.
            if line:
                body_lines.append(line)

        # Does this class have a post-init function?
        if has_post_init:
            params_str = ','.join(f.name for f in fields
                                  if f._field_type is _FIELD_INITVAR)
            # Divergence from stdlib: forward **kwargs into __post_init__.
            body_lines.append(f'{self_name}.{_POST_INIT_NAME}({params_str}{", " if params_str else ""} **kwargs)')

        # If no body lines, use 'pass'.
        if not body_lines:
            body_lines = ['pass']

        return _create_fn('__init__',
                          [self_name] + [_init_param(f) for f in fields if f.init] + ["**kwargs"],
                          body_lines,
                          locals=locals,
                          globals=globals,
                          return_type=None)
else:
    # 3.7.0–3.7.5 layout: _field_init consumed the ``globals`` mapping and
    # the signature had no explicit ``globals`` parameter.
    from dataclasses import MISSING, _HAS_DEFAULT_FACTORY, _POST_INIT_NAME, _FIELD_INITVAR, _init_param, _field_init, \
        _create_fn

    def dataclasses_init_fn_with_kwargs(fields, frozen, has_post_init, self_name):
        """Build a dataclass ``__init__`` that also accepts ``**kwargs``.

        Mirrors ``dataclasses._init_fn`` (pre-3.7.6); the only intended
        behavior change is the trailing ``**kwargs`` parameter, forwarded to
        ``__post_init__``.
        """
        # fields contains both real fields and InitVar pseudo-fields.

        # Make sure we don't have fields without defaults following fields
        # with defaults. This actually would be caught when exec-ing the
        # function source code, but catching it here gives a better error
        # message, and future-proofs us in case we build up the function
        # using ast.
        seen_default = False
        for f in fields:
            # Only consider fields in the __init__ call.
            if f.init:
                if not (f.default is MISSING and f.default_factory is MISSING):
                    seen_default = True
                elif seen_default:
                    raise TypeError(f'non-default argument {f.name!r} '
                                    'follows default argument')

        globals = {'MISSING': MISSING,
                   '_HAS_DEFAULT_FACTORY': _HAS_DEFAULT_FACTORY}

        body_lines = []
        for f in fields:
            line = _field_init(f, frozen, globals, self_name)
            # line is None means that this field doesn't require
            # initialization (it's a pseudo-field). Just skip it.
            if line:
                body_lines.append(line)

        # Does this class have a post-init function?
        if has_post_init:
            params_str = ','.join(f.name for f in fields
                                  if f._field_type is _FIELD_INITVAR)
            # Divergence from stdlib: forward **kwargs into __post_init__.
            body_lines.append(f'{self_name}.{_POST_INIT_NAME}({params_str}{", " if params_str else ""} **kwargs)')

        # If no body lines, use 'pass'.
        if not body_lines:
            body_lines = ['pass']

        locals = {f'_type_{f.name}': f.type for f in fields}
        return _create_fn('__init__',
                          [self_name] + [_init_param(f) for f in fields if f.init] + ["**kwargs"],
                          body_lines,
                          locals=locals,
                          globals=globals,
                          return_type=None)
| 43.092593
| 128
| 0.572841
| 582
| 4,654
| 4.340206
| 0.214777
| 0.049881
| 0.023753
| 0.047506
| 0.953286
| 0.940618
| 0.940618
| 0.940618
| 0.940618
| 0.940618
| 0
| 0.002642
| 0.349377
| 4,654
| 107
| 129
| 43.495327
| 0.831572
| 0.234852
| 0
| 0.735294
| 0
| 0
| 0.123799
| 0.025438
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029412
| false
| 0.029412
| 0.044118
| 0
| 0.102941
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1a280c7f9f7ee7a59624d63db8ace6327c3423a9
| 2,699
|
py
|
Python
|
src/genie/libs/parser/iosxe/tests/ShowKeyChain/cli/equal/golden_output1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 204
|
2018-06-27T00:55:27.000Z
|
2022-03-06T21:12:18.000Z
|
src/genie/libs/parser/iosxe/tests/ShowKeyChain/cli/equal/golden_output1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 468
|
2018-06-19T00:33:18.000Z
|
2022-03-31T23:23:35.000Z
|
src/genie/libs/parser/iosxe/tests/ShowKeyChain/cli/equal/golden_output1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 309
|
2019-01-16T20:21:07.000Z
|
2022-03-30T12:56:41.000Z
|
def _lifetime(start, end, is_valid):
    """Build one accept/send lifetime entry for the expected key-chain output."""
    return {"end": end, "is_valid": is_valid, "start": start}


def _always_valid():
    """Lifetime entry for a key that never expires."""
    return _lifetime("always valid", "always valid", True)


# Golden parsed output for the ShowKeyChain test: two key chains, each key
# carrying an accept and a send lifetime plus its key string.
expected_output = {
    "key_chains": {
        "bla": {
            "keys": {
                1: {
                    "accept_lifetime": _always_valid(),
                    "key_string": "cisco123",
                    "send_lifetime": _always_valid(),
                },
                2: {
                    "accept_lifetime": _lifetime(
                        "10:10:10 UTC Jan 1 2002", "06:01:00 UTC Jan 1 2010", False
                    ),
                    "key_string": "blabla",
                    "send_lifetime": _lifetime(
                        "10:10:10 UTC Jan 1 2002", "06:01:00 UTC Jan 1 2010", False
                    ),
                },
            },
        },
        "cisco": {
            "keys": {
                1: {
                    "accept_lifetime": _lifetime(
                        "11:11:11 UTC Mar 1 2001", "infinite", True
                    ),
                    "key_string": "cisco123",
                    "send_lifetime": _lifetime(
                        "11:11:11 UTC Mar 1 2001", "infinite", True
                    ),
                },
                2: {
                    "accept_lifetime": _lifetime(
                        "11:22:11 UTC Jan 1 2001", "22:11:11 UTC Dec 20 2030", True
                    ),
                    "key_string": "cisco234",
                    "send_lifetime": _always_valid(),
                },
                3: {
                    "accept_lifetime": _always_valid(),
                    "key_string": "cisco",
                    "send_lifetime": _always_valid(),
                },
            },
        },
    },
}
| 34.602564
| 59
| 0.272694
| 176
| 2,699
| 4.028409
| 0.227273
| 0.155148
| 0.124118
| 0.180536
| 0.815233
| 0.767278
| 0.726375
| 0.726375
| 0.726375
| 0.726375
| 0
| 0.09763
| 0.609114
| 2,699
| 77
| 60
| 35.051948
| 0.574408
| 0
| 0
| 0.597403
| 0
| 0
| 0.271212
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a7e436d634e5f5f4304b94f97318200712487794
| 253
|
py
|
Python
|
myCompany/dashboard/mixins/dashboard_mixin.py
|
Rom4eg/myCompany
|
31846a861d8b0560191e2e1d9791f101b88874df
|
[
"MIT"
] | null | null | null |
myCompany/dashboard/mixins/dashboard_mixin.py
|
Rom4eg/myCompany
|
31846a861d8b0560191e2e1d9791f101b88874df
|
[
"MIT"
] | null | null | null |
myCompany/dashboard/mixins/dashboard_mixin.py
|
Rom4eg/myCompany
|
31846a861d8b0560191e2e1d9791f101b88874df
|
[
"MIT"
] | null | null | null |
class DashboardMixin(object):
    """Mixin declaring the interface a dashboard component must implement.

    Subclasses are required to override both ``getTitle`` and ``getContent``;
    calling either on this mixin raises ``NotImplementedError``.
    """

    # Shared message for all unimplemented interface methods.
    _OVERRIDE_MSG = "You must override this method in a child class."

    def getTitle(self):
        """Return the dashboard title; must be overridden by subclasses."""
        raise NotImplementedError(self._OVERRIDE_MSG)

    def getContent(self):
        """Return the dashboard content; must be overridden by subclasses."""
        raise NotImplementedError(self._OVERRIDE_MSG)
| 28.111111
| 84
| 0.715415
| 31
| 253
| 5.83871
| 0.548387
| 0.099448
| 0.309392
| 0.342541
| 0.729282
| 0.729282
| 0.729282
| 0.729282
| 0.729282
| 0.729282
| 0
| 0
| 0.209486
| 253
| 8
| 85
| 31.625
| 0.905
| 0
| 0
| 0.4
| 0
| 0
| 0.373016
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 10
|
3d6ee9d16266408c60d5523ae7c5cd8a92f11538
| 20,742
|
py
|
Python
|
autograd/tests/test_trigo.py
|
pmaederyork/Dragrongrad
|
32794d561f8d0273592ed55d315013eab2c24b8b
|
[
"MIT"
] | 3
|
2018-12-17T16:24:11.000Z
|
2020-06-03T22:40:50.000Z
|
autograd/tests/test_trigo.py
|
cs207-project-group4/project-repo
|
d5ee88d2a7d16477d816d830ba90d241a05e3b48
|
[
"MIT"
] | 2
|
2018-10-18T17:59:26.000Z
|
2018-12-08T16:06:34.000Z
|
autograd/tests/test_trigo.py
|
cs207-project-group4/project-repo
|
d5ee88d2a7d16477d816d830ba90d241a05e3b48
|
[
"MIT"
] | 1
|
2019-08-19T06:06:13.000Z
|
2019-08-19T06:06:13.000Z
|
# -*- coding: utf-8 -*-
from autograd.blocks.trigo import sin
from autograd.blocks.trigo import cos
from autograd.blocks.trigo import tan
from autograd.blocks.trigo import arcsin
from autograd.blocks.trigo import arccos
from autograd.blocks.trigo import arctan
from autograd.variable import Variable
import numpy as np
import autograd as ad
def test_sin_forward():
    """Forward mode: sin block output and Jacobian match numpy references."""
    ad.set_mode('forward')
    data = np.random.random(5)
    out = sin()(Variable(data))
    out.compute_gradients()
    # numpy references: d/dx sin(x) = cos(x), elementwise -> diagonal Jacobian
    expected_data = np.sin(data)
    expected_grad = np.diag(np.cos(data))
    assert np.equal(expected_data, out.data).all(), 'wrong sin data pass. expected {}, given{}'.format(expected_data, out.data)
    assert np.equal(expected_grad, out.gradient).all(), 'wrong sin gradient forward pass. expected {}, given{}'.format(expected_grad, out.gradient)
def test_sin_reverse():
    """Reverse mode: sin block output and Jacobian match numpy references."""
    ad.set_mode('reverse')
    data = np.random.random(5)
    x = Variable(data)
    sin_block = sin()
    y_block = sin_block(x)
    # in reverse mode this triggers the backward accumulation
    y_block.compute_gradients()
    # numpy references: d/dx sin(x) = cos(x), elementwise -> diagonal Jacobian
    data_true = np.sin(data)
    gradient_true = np.diag(np.cos(data))
    assert np.equal(data_true, y_block.data).all(), 'wrong sin data pass. expected {}, given {}'.format(data_true, y_block.data)
    # fixed: message previously claimed "forward pass" in a reverse-mode test
    assert np.equal(gradient_true, y_block.gradient).all(), 'wrong sin gradient reverse pass. expected {}, given {}'.format(gradient_true, y_block.gradient)
def test_cos_forward():
    """Forward mode: cos block output and Jacobian match numpy references."""
    ad.set_mode('forward')
    data = np.random.random(5)
    out = cos()(Variable(data))
    out.compute_gradients()
    # numpy references: d/dx cos(x) = -sin(x), elementwise -> diagonal Jacobian
    expected_data = np.cos(data)
    expected_grad = np.diag(-np.sin(data))
    assert np.equal(expected_data, out.data).all(), 'wrong cos data pass. expected {}, given{}'.format(expected_data, out.data)
    assert np.equal(expected_grad, out.gradient).all(), 'wrong cos gradient forward pass. expected {}, given{}'.format(expected_grad, out.gradient)
def test_cos_reverse():
    """Reverse mode: cos block output and Jacobian match numpy references."""
    ad.set_mode('reverse')
    data = np.random.random(5)
    x = Variable(data)
    cos_block = cos()
    y_block = cos_block(x)
    # in reverse mode this triggers the backward accumulation
    y_block.compute_gradients()
    # numpy references: d/dx cos(x) = -sin(x), elementwise -> diagonal Jacobian
    data_true = np.cos(data)
    gradient_true = np.diag(-np.sin(data))
    assert np.equal(data_true, y_block.data).all(), 'wrong cos data pass. expected {}, given {}'.format(data_true, y_block.data)
    # fixed: message previously claimed "forward pass" in a reverse-mode test
    assert np.equal(gradient_true, y_block.gradient).all(), 'wrong cos gradient reverse pass. expected {}, given {}'.format(gradient_true, y_block.gradient)
def test_tan_forward():
    """Forward mode: tan block output and Jacobian match numpy references."""
    ad.set_mode('forward')
    data = np.random.random(5)
    out = tan()(Variable(data))
    out.compute_gradients()
    # numpy references: d/dx tan(x) = sec^2(x) = 1/cos^2(x)
    expected_data = np.tan(data)
    expected_grad = np.diag(1/np.cos(data)**2)
    assert np.equal(expected_data, out.data).all(), 'wrong tan data pass. expected {}, given{}'.format(expected_data, out.data)
    assert np.equal(expected_grad, out.gradient).all(), 'wrong tan gradient forward pass. expected {}, given{}'.format(expected_grad, out.gradient)
def test_tan_reverse():
    """Reverse mode: tan block output and Jacobian match numpy references."""
    ad.set_mode('reverse')
    data = np.random.random(5)
    x = Variable(data)
    tan_block = tan()
    y_block = tan_block(x)
    # in reverse mode this triggers the backward accumulation
    y_block.compute_gradients()
    # numpy references: d/dx tan(x) = sec^2(x) = 1/cos^2(x)
    data_true = np.tan(data)
    gradient_true = np.diag(1/np.cos(data)**2)
    assert np.equal(data_true, y_block.data).all(), 'wrong tan data pass. expected {}, given {}'.format(data_true, y_block.data)
    # fixed: message previously claimed "forward pass" in a reverse-mode test
    assert np.equal(gradient_true, y_block.gradient).all(), 'wrong tan gradient reverse pass. expected {}, given {}'.format(gradient_true, y_block.gradient)
def test_arcsin_forward():
    """Forward mode: arcsin block output and Jacobian match numpy references."""
    ad.set_mode('forward')
    data = np.random.random(5)
    out = arcsin()(Variable(data))
    out.compute_gradients()
    # numpy references: d/dx arcsin(x) = 1/sqrt(1 - x^2)
    expected_data = np.arcsin(data)
    expected_grad = np.diag(1/(np.sqrt(1 - data**2)))
    assert np.equal(expected_data, out.data).all(), 'wrong arcsin data pass. expected {}, given{}'.format(expected_data, out.data)
    assert np.equal(expected_grad, out.gradient).all(), 'wrong arcsin gradient forward pass. expected {}, given{}'.format(expected_grad, out.gradient)
def test_arcsin_reverse():
    """Reverse mode: arcsin block output and Jacobian match numpy references."""
    ad.set_mode('reverse')
    data = np.random.random(5)
    x = Variable(data)
    arcsin_block = arcsin()
    y_block = arcsin_block(x)
    # in reverse mode this triggers the backward accumulation
    y_block.compute_gradients()
    # numpy references: d/dx arcsin(x) = 1/sqrt(1 - x^2)
    data_true = np.arcsin(data)
    gradient_true = np.diag(1/(np.sqrt(1 - data**2)))
    assert np.equal(data_true, y_block.data).all(), 'wrong arcsin data pass. expected {}, given {}'.format(data_true, y_block.data)
    # fixed: message previously claimed "forward pass" in a reverse-mode test
    assert np.equal(gradient_true, y_block.gradient).all(), 'wrong arcsin gradient reverse pass. expected {}, given {}'.format(gradient_true, y_block.gradient)
def test_arccos_forward():
    """Forward mode: arccos block output and Jacobian match numpy references."""
    ad.set_mode('forward')
    data = np.random.random(5)
    out = arccos()(Variable(data))
    out.compute_gradients()
    # numpy references: d/dx arccos(x) = -1/sqrt(1 - x^2)
    expected_data = np.arccos(data)
    expected_grad = np.diag(-1/(np.sqrt(1 - data**2)))
    assert np.equal(expected_data, out.data).all(), 'wrong arccos data pass. expected {}, given{}'.format(expected_data, out.data)
    assert np.equal(expected_grad, out.gradient).all(), 'wrong arccos gradient forward pass. expected {}, given{}'.format(expected_grad, out.gradient)
def test_arccos_reverse():
    """Reverse mode: arccos block output and Jacobian match numpy references."""
    ad.set_mode('reverse')
    data = np.random.random(5)
    x = Variable(data)
    arccos_block = arccos()
    y_block = arccos_block(x)
    # in reverse mode this triggers the backward accumulation
    y_block.compute_gradients()
    # numpy references: d/dx arccos(x) = -1/sqrt(1 - x^2)
    data_true = np.arccos(data)
    gradient_true = np.diag(-1/(np.sqrt(1 - data**2)))
    assert np.equal(data_true, y_block.data).all(), 'wrong arccos data pass. expected {}, given {}'.format(data_true, y_block.data)
    # fixed: message previously claimed "forward pass" in a reverse-mode test
    assert np.equal(gradient_true, y_block.gradient).all(), 'wrong arccos gradient reverse pass. expected {}, given {}'.format(gradient_true, y_block.gradient)
def test_arctan_forward():
    """Forward mode: arctan block output and Jacobian match numpy references."""
    ad.set_mode('forward')
    data = np.random.random(5)
    out = arctan()(Variable(data))
    out.compute_gradients()
    # numpy references: d/dx arctan(x) = 1/(1 + x^2)
    expected_data = np.arctan(data)
    expected_grad = np.diag(1/(1 + data**2))
    assert np.equal(expected_data, out.data).all(), 'wrong arctan data pass. expected {}, given{}'.format(expected_data, out.data)
    assert np.equal(expected_grad, out.gradient).all(), 'wrong arctan gradient forward pass. expected {}, given{}'.format(expected_grad, out.gradient)
def test_arctan_reverse():
    """Reverse mode: arctan block output and Jacobian match numpy references."""
    ad.set_mode('reverse')
    data = np.random.random(5)
    x = Variable(data)
    arctan_block = arctan()
    y_block = arctan_block(x)
    # in reverse mode this triggers the backward accumulation
    y_block.compute_gradients()
    # numpy references: d/dx arctan(x) = 1/(1 + x^2)
    data_true = np.arctan(data)
    gradient_true = np.diag(1/(1 + data**2))
    assert np.equal(data_true, y_block.data).all(), 'wrong arctan data pass. expected {}, given {}'.format(data_true, y_block.data)
    # fixed: message previously claimed "forward pass" in a reverse-mode test
    assert np.equal(gradient_true, y_block.gradient).all(), 'wrong arctan gradient reverse pass. expected {}, given {}'.format(gradient_true, y_block.gradient)
# Module-level side effect: restore the default 'forward' mode after the
# reverse-mode tests above, so importing this test module does not leave
# the autograd package in reverse mode for other test files.
ad.set_mode('forward')
| 46.093333
| 157
| 0.311494
| 1,337
| 20,742
| 4.670157
| 0.040389
| 0.069186
| 0.076874
| 0.065343
| 0.974856
| 0.946989
| 0.946989
| 0.946989
| 0.946989
| 0.946989
| 0
| 0.001847
| 0.086443
| 20,742
| 449
| 158
| 46.195991
| 0.327669
| 0.64285
| 0
| 0.852113
| 0
| 0
| 0.174621
| 0
| 0
| 1
| 0
| 0
| 0.169014
| 1
| 0.084507
| false
| 0.169014
| 0.06338
| 0
| 0.147887
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 10
|
3d722ff1d8e11fc05c728a1ae851428b91b58d8c
| 12,032
|
py
|
Python
|
devel/lib/python2.7/dist-packages/interbotix_moveit_interface/srv/_MoveItPlan.py
|
Jam-cpu/Masters-Project---Final
|
0b266b1f117a579b96507249f0a128d0e3cc082a
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
devel/lib/python2.7/dist-packages/interbotix_moveit_interface/srv/_MoveItPlan.py
|
Jam-cpu/Masters-Project---Final
|
0b266b1f117a579b96507249f0a128d0e3cc082a
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
devel/lib/python2.7/dist-packages/interbotix_moveit_interface/srv/_MoveItPlan.py
|
Jam-cpu/Masters-Project---Final
|
0b266b1f117a579b96507249f0a128d0e3cc082a
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from interbotix_moveit_interface/MoveItPlanRequest.msg. Do not edit."""
import codecs
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import geometry_msgs.msg
class MoveItPlanRequest(genpy.Message):
  """Request half of the interbotix_moveit_interface/MoveItPlan service.

  Autogenerated by genpy from MoveItPlanRequest.msg -- do not edit by hand.
  Fields: ``cmd`` (int8 command enum) and ``ee_pose`` (geometry_msgs/Pose,
  the desired end-effector pose).
  """
  # md5 of the .msg definition; ROS uses it to verify both endpoints agree.
  _md5sum = "83858a8a41306e5b1efdbc05501e2275"
  _type = "interbotix_moveit_interface/MoveItPlanRequest"
  _has_header = False # flag to mark the presence of a Header object
  # Verbatim text of the .msg definition (runtime data, not a comment).
  _full_text = """# Send commands to the moveit_plan server
#
# Enum values that define the commands available for the server. Note that all
# ee_poses (defined in the 'ee_arm_link' frame) are relative to the 'world' frame.
# There are 4 options:
# 1) CMD_PLAN_POSE - Desired ee_pose which is made up of a position and orientation element
# 2) CMD_PLAN_POSITION - Desired ee_position which is made up of a position element only; orientation is not constrained
# 3) CMD_PLAN_ORIENTATION - Desired ee_orientation which is made up of an orientation element only; position is not constrained
# 4) CMD_EXECUTE - Once a plan is available, this command executes the planned trajectory on the gazebo or physical robot
int8 CMD_PLAN_POSE = 1
int8 CMD_PLAN_POSITION = 2
int8 CMD_PLAN_ORIENTATION = 3
int8 CMD_EXECUTE = 4
int8 cmd
# desired ee_pose, position, or orientation
geometry_msgs/Pose ee_pose
================================================================================
MSG: geometry_msgs/Pose
# A representation of pose in free space, composed of position and orientation.
Point position
Quaternion orientation
================================================================================
MSG: geometry_msgs/Point
# This contains the position of a point in free space
float64 x
float64 y
float64 z
================================================================================
MSG: geometry_msgs/Quaternion
# This represents an orientation in free space in quaternion form.
float64 x
float64 y
float64 z
float64 w
"""
  # Pseudo-constants (mirror the enum values declared in the .msg text above)
  CMD_PLAN_POSE = 1
  CMD_PLAN_POSITION = 2
  CMD_PLAN_ORIENTATION = 3
  CMD_EXECUTE = 4
  __slots__ = ['cmd','ee_pose']
  _slot_types = ['int8','geometry_msgs/Pose']

  def __init__(self, *args, **kwds):
    """
    Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommended
    use is keyword arguments as this is more robust to future message
    changes. You cannot mix in-order arguments and keyword arguments.

    The available fields are:
       cmd,ee_pose

    :param args: complete set of field values, in .msg order
    :param kwds: use keyword arguments corresponding to message field names
    to set specific fields.
    """
    if args or kwds:
      super(MoveItPlanRequest, self).__init__(*args, **kwds)
      # message fields cannot be None, assign default values for those that are
      if self.cmd is None:
        self.cmd = 0
      if self.ee_pose is None:
        self.ee_pose = geometry_msgs.msg.Pose()
    else:
      # no arguments: initialize every field to its default
      self.cmd = 0
      self.ee_pose = geometry_msgs.msg.Pose()

  def _get_types(self):
    """
    internal API method
    """
    return self._slot_types

  def serialize(self, buff):
    """
    serialize message into buffer
    :param buff: buffer, ``StringIO``
    """
    try:
      _x = self
      # single fixed-layout record: int8 cmd followed by 7 float64 pose fields
      buff.write(_get_struct_b7d().pack(_x.cmd, _x.ee_pose.position.x, _x.ee_pose.position.y, _x.ee_pose.position.z, _x.ee_pose.orientation.x, _x.ee_pose.orientation.y, _x.ee_pose.orientation.z, _x.ee_pose.orientation.w))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))

  def deserialize(self, str):
    """
    unpack serialized message in str into this message instance
    :param str: byte array of serialized message, ``str``
    """
    if python3:
      codecs.lookup_error("rosmsg").msg_type = self._type
    try:
      if self.ee_pose is None:
        self.ee_pose = geometry_msgs.msg.Pose()
      end = 0
      _x = self
      start = end
      # fixed record size: 1 byte (int8) + 7 * 8 bytes (float64) = 57 bytes
      end += 57
      (_x.cmd, _x.ee_pose.position.x, _x.ee_pose.position.y, _x.ee_pose.position.z, _x.ee_pose.orientation.x, _x.ee_pose.orientation.y, _x.ee_pose.orientation.z, _x.ee_pose.orientation.w,) = _get_struct_b7d().unpack(str[start:end])
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) # most likely buffer underfill

  def serialize_numpy(self, buff, numpy):
    """
    serialize message with numpy array types into buffer
    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module
    """
    try:
      _x = self
      # same wire format as serialize(); numpy variant kept for API parity
      buff.write(_get_struct_b7d().pack(_x.cmd, _x.ee_pose.position.x, _x.ee_pose.position.y, _x.ee_pose.position.z, _x.ee_pose.orientation.x, _x.ee_pose.orientation.y, _x.ee_pose.orientation.z, _x.ee_pose.orientation.w))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))

  def deserialize_numpy(self, str, numpy):
    """
    unpack serialized message in str into this message instance using numpy for array types
    :param str: byte array of serialized message, ``str``
    :param numpy: numpy python module
    """
    if python3:
      codecs.lookup_error("rosmsg").msg_type = self._type
    try:
      if self.ee_pose is None:
        self.ee_pose = geometry_msgs.msg.Pose()
      end = 0
      _x = self
      start = end
      # fixed record size: 1 byte (int8) + 7 * 8 bytes (float64) = 57 bytes
      end += 57
      (_x.cmd, _x.ee_pose.position.x, _x.ee_pose.position.y, _x.ee_pose.position.z, _x.ee_pose.orientation.x, _x.ee_pose.orientation.y, _x.ee_pose.orientation.z, _x.ee_pose.orientation.w,) = _get_struct_b7d().unpack(str[start:end])
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) # most likely buffer underfill
# Shared pre-compiled Struct for a single little-endian uint32, provided by genpy.
_struct_I = genpy.struct_I
def _get_struct_I():
    # Accessor kept for symmetry with the lazily-built structs below.
    global _struct_I
    return _struct_I
_struct_b7d = None
def _get_struct_b7d():
global _struct_b7d
if _struct_b7d is None:
_struct_b7d = struct.Struct("<b7d")
return _struct_b7d
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from interbotix_moveit_interface/MoveItPlanResponse.msg. Do not edit."""
import codecs
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import std_msgs.msg
class MoveItPlanResponse(genpy.Message):
  """Response half of the interbotix_moveit_interface/MoveItPlan service.

  Autogenerated by genpy from MoveItPlanResponse.msg -- do not edit by hand.
  Fields: ``msg`` (std_msgs/String status text) and ``success`` (bool).
  """
  # md5 of the .msg definition; ROS uses it to verify both endpoints agree.
  _md5sum = "6815ecbafa5176f5e638e681f8dd5385"
  _type = "interbotix_moveit_interface/MoveItPlanResponse"
  _has_header = False # flag to mark the presence of a Header object
  # Verbatim text of the .msg definition (runtime data, not a comment).
  _full_text = """# message if the planning or execution was successful; this is displayed in the custom GUI.
std_msgs/String msg
# boolean to easily check if the planning or execution was successful.
bool success
================================================================================
MSG: std_msgs/String
string data
"""
  __slots__ = ['msg','success']
  _slot_types = ['std_msgs/String','bool']

  def __init__(self, *args, **kwds):
    """
    Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommended
    use is keyword arguments as this is more robust to future message
    changes. You cannot mix in-order arguments and keyword arguments.

    The available fields are:
       msg,success

    :param args: complete set of field values, in .msg order
    :param kwds: use keyword arguments corresponding to message field names
    to set specific fields.
    """
    if args or kwds:
      super(MoveItPlanResponse, self).__init__(*args, **kwds)
      # message fields cannot be None, assign default values for those that are
      if self.msg is None:
        self.msg = std_msgs.msg.String()
      if self.success is None:
        self.success = False
    else:
      # no arguments: initialize every field to its default
      self.msg = std_msgs.msg.String()
      self.success = False

  def _get_types(self):
    """
    internal API method
    """
    return self._slot_types

  def serialize(self, buff):
    """
    serialize message into buffer
    :param buff: buffer, ``StringIO``
    """
    try:
      _x = self.msg.data
      length = len(_x)
      # NOTE: `unicode` only exists on Python 2; the `python3 or` short-circuit
      # prevents it from being evaluated (and raising NameError) on Python 3.
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)  # length in bytes after encoding, not in characters
      # wire format: uint32 byte length followed by the raw utf-8 bytes
      buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
      _x = self.success
      buff.write(_get_struct_B().pack(_x))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))

  def deserialize(self, str):
    """
    unpack serialized message in str into this message instance
    :param str: byte array of serialized message, ``str``
    """
    if python3:
      codecs.lookup_error("rosmsg").msg_type = self._type
    try:
      if self.msg is None:
        self.msg = std_msgs.msg.String()
      end = 0
      # read the uint32 length prefix of the string field
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.msg.data = str[start:end].decode('utf-8', 'rosmsg')
      else:
        self.msg.data = str[start:end]
      # single byte for the bool field
      start = end
      end += 1
      (self.success,) = _get_struct_B().unpack(str[start:end])
      # normalize the unpacked 0/1 integer to a real bool
      self.success = bool(self.success)
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) # most likely buffer underfill

  def serialize_numpy(self, buff, numpy):
    """
    serialize message with numpy array types into buffer
    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module
    """
    try:
      _x = self.msg.data
      length = len(_x)
      # same encoding logic as serialize(); numpy variant kept for API parity
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
      _x = self.success
      buff.write(_get_struct_B().pack(_x))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))

  def deserialize_numpy(self, str, numpy):
    """
    unpack serialized message in str into this message instance using numpy for array types
    :param str: byte array of serialized message, ``str``
    :param numpy: numpy python module
    """
    if python3:
      codecs.lookup_error("rosmsg").msg_type = self._type
    try:
      if self.msg is None:
        self.msg = std_msgs.msg.String()
      end = 0
      # read the uint32 length prefix of the string field
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.msg.data = str[start:end].decode('utf-8', 'rosmsg')
      else:
        self.msg.data = str[start:end]
      # single byte for the bool field
      start = end
      end += 1
      (self.success,) = _get_struct_B().unpack(str[start:end])
      self.success = bool(self.success)
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) # most likely buffer underfill
# Shared pre-compiled Struct for a single little-endian uint32, provided by genpy.
# (Redefined here because genpy concatenates two generated modules into one .py.)
_struct_I = genpy.struct_I
def _get_struct_I():
    # Accessor kept for symmetry with the lazily-built structs below.
    global _struct_I
    return _struct_I
_struct_B = None
def _get_struct_B():
global _struct_B
if _struct_B is None:
_struct_B = struct.Struct("<B")
return _struct_B
class MoveItPlan(object):
# Service handle bundling the request/response message classes — standard
# genpy service layout, presumably consumed by rospy's Service/ServiceProxy
# machinery (TODO confirm against the generating .srv file).
_type = 'interbotix_moveit_interface/MoveItPlan'
# MD5 of the service definition; in the genpy convention this is used for
# client/server interface-compatibility checks.
_md5sum = 'eff4f39614e2625df73027e93071de20'
_request_class  = MoveItPlanRequest
_response_class = MoveItPlanResponse
| 35.916418
| 231
| 0.662982
| 1,690
| 12,032
| 4.532544
| 0.14142
| 0.031332
| 0.025587
| 0.037598
| 0.751175
| 0.749217
| 0.739948
| 0.720235
| 0.720235
| 0.720235
| 0
| 0.016601
| 0.203956
| 12,032
| 334
| 232
| 36.023952
| 0.783149
| 0.204205
| 0
| 0.704348
| 1
| 0.013043
| 0.263169
| 0.062057
| 0
| 0
| 0.002181
| 0
| 0
| 1
| 0.069565
| false
| 0
| 0.043478
| 0
| 0.256522
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3d8cfc5173ede3039c49f7714d04bd7cf9c80f07
| 15,840
|
py
|
Python
|
run_all_trials.py
|
paul028/EE290_Localization_Project
|
82562de4b1180b61b314fb2dc03a04ff6c1ad34c
|
[
"MIT"
] | null | null | null |
run_all_trials.py
|
paul028/EE290_Localization_Project
|
82562de4b1180b61b314fb2dc03a04ff6c1ad34c
|
[
"MIT"
] | null | null | null |
run_all_trials.py
|
paul028/EE290_Localization_Project
|
82562de4b1180b61b314fb2dc03a04ff6c1ad34c
|
[
"MIT"
] | null | null | null |
import subprocess
import time

# Delay between consecutive trial runs (seconds).
PAUSE = 0.2

# PCA component counts, in the exact order the original hand-written call
# list used them. The very first sweep (sf=0, oor=1) used a slightly
# different order than the other three; both orders are preserved verbatim.
PCA_FIRST_SWEEP = [0, 44, 40, 5, 3, 10, 7]
PCA_OTHER_SWEEPS = [0, 44, 40, 10, 7, 5, 3]

# (sf flag, oor mode, pca order) for the four sweeps run per script.
SWEEPS = [
    (0, 1, PCA_FIRST_SWEEP),
    (1, 1, PCA_OTHER_SWEEPS),
    (0, 2, PCA_OTHER_SWEEPS),
    (1, 2, PCA_OTHER_SWEEPS),
]


def trial_name(base, pca, sf, oor):
    """Build the --trial-name value: base [+PCA=n] [+SF] [_SFD when oor == 2]."""
    name = base
    if pca:
        name += '+PCA=%d' % pca
    if sf:
        name += '+SF'
    if oor == 2:
        name += '_SFD'
    return name


def build_command(script, base, pca, sf, oor, extra_args=''):
    """Return the exact shell command line the original script issued.

    :param script: training script filename to invoke with `python`
    :param base: trial-name prefix ('ext' or 'MLP')
    :param extra_args: extra CLI text inserted after --pca (e.g. epoch/patience)
    """
    return 'python %s --trial-name "%s" --pca %d%s --sf %d --oor %d' % (
        script, trial_name(base, pca, sf, oor), pca, extra_args, sf, oor)


def run_trials(script, base, extra_args=''):
    """Run all 28 sweep combinations for one training script, pausing between runs."""
    for sf, oor, pca_order in SWEEPS:
        for pca in pca_order:
            # shell=True kept for compatibility with the original; every part
            # of the command is a constant above, so no untrusted input
            # reaches the shell.
            subprocess.call(build_command(script, base, pca, sf, oor, extra_args),
                            shell=True)
            time.sleep(PAUSE)


def main():
    print("Opening")
    # extra trees original
    run_trials('extratress.py', 'ext')
    # extra trees modified
    run_trials('extratress_mod.py', 'ext')
    # MLP original
    run_trials('MLP_withPCA+SF.py', 'MLP', ' --epoch 10000 --patience 300')
    # MLP modified
    run_trials('MLP_withPCA+SF_dataset_modified.py', 'MLP',
               ' --epoch 10000 --patience 300')


if __name__ == '__main__':
    # Guarded so importing this module no longer launches every trial.
    main()
| 62.608696
| 157
| 0.70947
| 2,978
| 15,840
| 3.707858
| 0.013432
| 0.142003
| 0.202862
| 0.182576
| 0.991849
| 0.991849
| 0.991849
| 0.991849
| 0.991849
| 0.989132
| 0
| 0.083624
| 0.094066
| 15,840
| 253
| 158
| 62.608696
| 0.685854
| 0.003535
| 0
| 0.493392
| 0
| 0.475771
| 0.662252
| 0.060326
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.008811
| 0
| 0.008811
| 0.004405
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
9a78ae7472f71b7a2caa381b84cb0d09c35b7afe
| 125
|
py
|
Python
|
cadcad_machine_search/visualizations/__init__.py
|
cadCAD-org/cadCAD_machine_search
|
ed6a59162044d0808007fd7d9122474e7f547522
|
[
"MIT"
] | 2
|
2021-05-03T10:46:54.000Z
|
2021-06-07T00:12:48.000Z
|
cadcad_machine_search/visualizations/__init__.py
|
danlessa/cadCAD_machine_search
|
3b8bf54672c7cc992cc00c0aa0ef73e82ba8d826
|
[
"MIT"
] | null | null | null |
cadcad_machine_search/visualizations/__init__.py
|
danlessa/cadCAD_machine_search
|
3b8bf54672c7cc992cc00c0aa0ef73e82ba8d826
|
[
"MIT"
] | 2
|
2021-02-10T22:32:38.000Z
|
2021-05-27T21:17:06.000Z
|
from cadcad_machine_search.visualizations.sensitivity import *
from cadcad_machine_search.visualizations.goal_impact import *
| 62.5
| 62
| 0.896
| 15
| 125
| 7.133333
| 0.6
| 0.186916
| 0.317757
| 0.429907
| 0.691589
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.056
| 125
| 2
| 63
| 62.5
| 0.90678
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9ad68e5f18b2f624a464315bbde2ca169fe0d2d3
| 104
|
py
|
Python
|
odoo-13.0/addons/website_forum/tests/__init__.py
|
VaibhavBhujade/Blockchain-ERP-interoperability
|
b5190a037fb6615386f7cbad024d51b0abd4ba03
|
[
"MIT"
] | null | null | null |
odoo-13.0/addons/website_forum/tests/__init__.py
|
VaibhavBhujade/Blockchain-ERP-interoperability
|
b5190a037fb6615386f7cbad024d51b0abd4ba03
|
[
"MIT"
] | null | null | null |
odoo-13.0/addons/website_forum/tests/__init__.py
|
VaibhavBhujade/Blockchain-ERP-interoperability
|
b5190a037fb6615386f7cbad024d51b0abd4ba03
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from . import common
from . import test_forum
from . import test_forum_process
| 17.333333
| 32
| 0.711538
| 15
| 104
| 4.733333
| 0.6
| 0.422535
| 0.394366
| 0.535211
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011628
| 0.173077
| 104
| 5
| 33
| 20.8
| 0.813953
| 0.201923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
b1780c19c3247aa519affae582dca78c2af61f83
| 274
|
py
|
Python
|
deepaccess/train/__init__.py
|
jhammelman/DeepAccessTransfer
|
8ca978873e2fcb1b95d90902e3fb38e710027776
|
[
"MIT"
] | 2
|
2021-08-16T18:34:59.000Z
|
2022-02-19T16:05:21.000Z
|
deepaccess/train/__init__.py
|
jhammelman/DeepAccessTransfer
|
8ca978873e2fcb1b95d90902e3fb38e710027776
|
[
"MIT"
] | null | null | null |
deepaccess/train/__init__.py
|
jhammelman/DeepAccessTransfer
|
8ca978873e2fcb1b95d90902e3fb38e710027776
|
[
"MIT"
] | 1
|
2021-05-26T21:54:53.000Z
|
2021-05-26T21:54:53.000Z
|
from deepaccess.ensemble_utils import ensure_dir
from deepaccess.ensemble_utils import act_to_class
from deepaccess.ensemble_utils import fa_to_onehot
from deepaccess.interpret.importance_utils import *
from .CNN import *
from .DeepAccessModel import *
from .train import *
| 34.25
| 51
| 0.854015
| 38
| 274
| 5.921053
| 0.447368
| 0.248889
| 0.293333
| 0.36
| 0.44
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.10219
| 274
| 7
| 52
| 39.142857
| 0.914634
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
491086975223980d12e9b84434ff1c3412423e26
| 4,105
|
py
|
Python
|
locust/test/test_old_wait_api.py
|
ghoshben/locust
|
4e515cc78b24a9d452ac1a8832b562ace7aed14e
|
[
"MIT"
] | 1
|
2020-01-13T03:18:29.000Z
|
2020-01-13T03:18:29.000Z
|
locust/test/test_old_wait_api.py
|
ulshi/locust
|
551d169c194070522c66dad1da67523c03087bc8
|
[
"MIT"
] | 1
|
2020-03-06T11:12:34.000Z
|
2020-03-06T11:12:34.000Z
|
locust/test/test_old_wait_api.py
|
ulshi/locust
|
551d169c194070522c66dad1da67523c03087bc8
|
[
"MIT"
] | 1
|
2020-03-06T11:02:00.000Z
|
2020-03-06T11:02:00.000Z
|
import warnings
from locust import InterruptTaskSet, ResponseError
from locust.core import HttpLocust, Locust, TaskSet, events, task
from locust.exception import (CatchResponseError, LocustError, RescheduleTask,
RescheduleTaskImmediately)
from locust.wait_time import between, constant
from .testcases import LocustTestCase, WebserverTestCase
class TestOldWaitApi(LocustTestCase):
    """Checks for the deprecated wait API (wait_function, min_wait/max_wait):
    it must still work and must emit a DeprecationWarning."""

    def setUp(self):
        super(TestOldWaitApi, self).setUp()

    def test_wait_function(self):
        # Deprecated wait_function on the Locust class: 5000 ms -> 5 s.
        with warnings.catch_warnings(record=True) as caught:
            class User(Locust):
                wait_function = lambda self: 5000

            class MyTaskSet(TaskSet):
                pass

            ts = MyTaskSet(User())
            self.assertEqual(5, ts.wait_time())
            self.assertEqual(1, len(caught))
            self.assertTrue(issubclass(caught[0].category, DeprecationWarning))
            self.assertIn("wait_function", str(caught[0].message))

    def test_wait_function_on_taskset(self):
        # Same as above, but wait_function declared on the TaskSet instead.
        with warnings.catch_warnings(record=True) as caught:
            class User(Locust):
                pass

            class MyTaskSet(TaskSet):
                wait_function = lambda self: 5000

            ts = MyTaskSet(User())
            self.assertEqual(5, ts.wait_time())
            self.assertEqual(1, len(caught))
            self.assertTrue(issubclass(caught[0].category, DeprecationWarning))
            self.assertIn("wait_function", str(caught[0].message))

    def test_min_max_wait(self):
        # min_wait/max_wait of 1000 ms should yield a 1 s wait and one warning.
        with warnings.catch_warnings(record=True) as caught:
            class User(Locust):
                min_wait = 1000
                max_wait = 1000

            class TS(TaskSet):
                @task
                def t(self):
                    pass

            ts = TS(User())
            self.assertEqual(1, ts.wait_time())
            self.assertEqual(1, len(caught))
            self.assertTrue(issubclass(caught[0].category, DeprecationWarning))
            self.assertIn("min_wait", str(caught[0].message))
            self.assertIn("max_wait", str(caught[0].message))

    def test_zero_min_max_wait(self):
        # NOTE(review): the early return below disables this test in the
        # original source; preserved as-is to keep behavior identical.
        return
        with warnings.catch_warnings(record=True) as caught:
            class User(Locust):
                min_wait = 0
                max_wait = 0

            class TS(TaskSet):
                @task
                def t(self):
                    pass

            ts = TS(User())
            self.assertEqual(0, ts.wait_time())
            self.assertEqual(1, len(caught))
            self.assertTrue(issubclass(caught[0].category, DeprecationWarning))
            self.assertIn("min_wait", str(caught[0].message))
            self.assertIn("max_wait", str(caught[0].message))

    def test_min_max_wait_combined_with_wait_time(self):
        # An explicit wait_time on the TaskSet wins over deprecated
        # min_wait/max_wait declared on the Locust.
        with warnings.catch_warnings(record=True) as caught:
            class User(Locust):
                min_wait = 1000
                max_wait = 1000

            class TS(TaskSet):
                wait_time = constant(3)

                @task
                def t(self):
                    pass

            ts = TS(User())
            self.assertEqual(3, ts.wait_time())
            self.assertEqual(1, len(caught))
            self.assertTrue(issubclass(caught[0].category, DeprecationWarning))
            self.assertIn("min_wait", str(caught[0].message))
            self.assertIn("max_wait", str(caught[0].message))

    def test_min_max_wait_on_taskset(self):
        # wait_time on the Locust also wins over min_wait/max_wait on the TaskSet.
        with warnings.catch_warnings(record=True) as caught:
            class User(Locust):
                wait_time = constant(3)

            class TS(TaskSet):
                min_wait = 1000
                max_wait = 1000

                @task
                def t(self):
                    pass

            ts = TS(User())
            self.assertEqual(3, ts.wait_time())
            self.assertEqual(1, len(caught))
            self.assertTrue(issubclass(caught[0].category, DeprecationWarning))
            self.assertIn("min_wait", str(caught[0].message))
            self.assertIn("max_wait", str(caught[0].message))
| 38.364486
| 78
| 0.5581
| 444
| 4,105
| 5.024775
| 0.144144
| 0.014343
| 0.022411
| 0.053788
| 0.754818
| 0.733303
| 0.723442
| 0.723442
| 0.723442
| 0.71896
| 0
| 0.023625
| 0.340073
| 4,105
| 106
| 79
| 38.726415
| 0.799926
| 0
| 0
| 0.778947
| 0
| 0
| 0.021924
| 0
| 0
| 0
| 0
| 0
| 0.294737
| 1
| 0.115789
| false
| 0.063158
| 0.063158
| 0
| 0.326316
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
492d7603cb63b6bf0f91bcc81aef452034356c74
| 6,031
|
py
|
Python
|
pyduplicate/database/utils.py
|
Clement-O/python-duplicate
|
4505076567a7cd8cc0904a6e0ca21f26c6c9dd2d
|
[
"MIT"
] | null | null | null |
pyduplicate/database/utils.py
|
Clement-O/python-duplicate
|
4505076567a7cd8cc0904a6e0ca21f26c6c9dd2d
|
[
"MIT"
] | 2
|
2019-10-19T05:58:13.000Z
|
2020-01-19T19:31:52.000Z
|
pyduplicate/database/utils.py
|
Clement-O/python-duplicate
|
4505076567a7cd8cc0904a6e0ca21f26c6c9dd2d
|
[
"MIT"
] | null | null | null |
# noinspection SqlNoDataSourceInspection,SqlDialectInspection
class Postgres:
    """
    Provides functions to query PostgreSQL
    :param str table: Param to select the right table
    :param str column: Param to search duplicate or unique on
    """

    def __init__(self, table: str, column: str) -> None:
        self.table = table
        self.column = column
        if not all(isinstance(x, str) for x in [self.table, self.column]):
            # Plain string: the original used an f-string with no placeholders (F541).
            raise TypeError("The 'table' and/or 'column' must be a string!")

    def _join_query(self, select: str, op: str) -> str:
        """
        Build the self-join shared by all duplicate/unique queries.
        :param select: columns to select from the outer table alias ``t``
        :param op: comparison against COUNT(*): '>' for duplicates, '=' for uniques
        :return: SQL selecting rows whose column value count satisfies the comparison
        """
        return f"SELECT {select} " \
               f"FROM {self.table} AS t " \
               f"INNER JOIN (" \
               f"SELECT {self.column} " \
               f"FROM {self.table} " \
               f"GROUP BY {self.column} " \
               f"HAVING ( COUNT(*) {op} 1 )" \
               f") dt ON t.{self.column}=dt.{self.column}"

    def select_pk_name_query(self) -> str:
        """
        :return: Postgres query to select the primary key
        """
        return f"SELECT a.attname " \
               f"FROM pg_index AS i " \
               f"JOIN pg_attribute AS a " \
               f"ON a.attrelid = i.indrelid " \
               f"AND a.attnum = ANY(i.indkey) " \
               f"WHERE i.indrelid = '{self.table}'::regclass " \
               f"AND i.indisprimary;"

    def select_duplicate_query(self) -> str:
        """
        :return: Postgres query to select the duplicate entries
        """
        return self._join_query("t.*", ">")

    def select_duplicate_pk_query(self, pk: str) -> str:
        """
        :param pk: Primary key of the table
        :return: Postgres query to select the pk of the duplicate entries
        """
        return self._join_query(f"t.{pk}", ">")

    def select_unique_query(self) -> str:
        """
        :return: Postgres query to select the unique entries
        """
        return self._join_query("t.*", "=")

    def select_unique_pk_query(self, pk: str) -> str:
        """
        :param pk: Primary key of the table
        :return: Postgres query to select the pk of the unique entries
        """
        return self._join_query(f"t.{pk}", "=")
# noinspection SqlNoDataSourceInspection,SqlDialectInspection
class MySQL:
    """
    Provides functions to query MySQL
    :param str table: Param to select the right table
    :param str column: Param to search duplicate or unique on
    """

    def __init__(self, table: str, column: str) -> None:
        self.table = table
        self.column = column
        if not all(isinstance(x, str) for x in [self.table, self.column]):
            # Plain string: the original used an f-string with no placeholders (F541).
            raise TypeError("The 'table' and/or 'column' must be a string!")

    def _join_query(self, select: str, op: str) -> str:
        """
        Build the self-join shared by all duplicate/unique queries.
        :param select: columns to select from the outer table alias ``t``
        :param op: comparison against COUNT(*): '>' for duplicates, '=' for uniques
        :return: SQL selecting rows whose column value count satisfies the comparison
        """
        return f"SELECT {select} " \
               f"FROM {self.table} AS t " \
               f"INNER JOIN (" \
               f"SELECT {self.column} " \
               f"FROM {self.table} " \
               f"GROUP BY {self.column} " \
               f"HAVING ( COUNT(*) {op} 1 )" \
               f") dt ON t.{self.column}=dt.{self.column}"

    def select_pk_name_query(self) -> str:
        """
        :return: MySQL query to select the primary key
        """
        return f"SELECT k.COLUMN_NAME " \
               f"FROM information_schema.table_constraints t " \
               f"LEFT JOIN information_schema.key_column_usage k " \
               f"USING(constraint_name,table_schema,table_name) " \
               f"WHERE t.constraint_type='PRIMARY KEY' " \
               f"AND t.table_schema=DATABASE() " \
               f"AND t.table_name='{self.table}'"

    def select_duplicate_query(self) -> str:
        """
        :return: MySQL query to select the duplicate entries
        """
        return self._join_query("t.*", ">")

    def select_duplicate_pk_query(self, pk: str) -> str:
        """
        :param pk: Primary key of the table
        :return: MySQL query to select the pk of the duplicate entries
        """
        return self._join_query(f"t.{pk}", ">")

    def select_unique_query(self) -> str:
        """
        :return: MySQL query to select the unique entries
        """
        return self._join_query("t.*", "=")

    def select_unique_pk_query(self, pk: str) -> str:
        """
        :param pk: Primary key of the table
        :return: MySQL query to select the pk of the unique entries
        """
        return self._join_query(f"t.{pk}", "=")
| 34.861272
| 77
| 0.500912
| 737
| 6,031
| 4.033921
| 0.112619
| 0.12109
| 0.048436
| 0.075345
| 0.817693
| 0.817693
| 0.817693
| 0.805584
| 0.805584
| 0.75412
| 0
| 0.002117
| 0.373404
| 6,031
| 172
| 78
| 35.063953
| 0.784599
| 0.185044
| 0
| 0.84
| 0
| 0
| 0.415234
| 0.103373
| 0
| 0
| 0
| 0
| 0
| 1
| 0.12
| false
| 0
| 0
| 0
| 0.24
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
49584d3d702b5293e122a0381a876f33e0c587a5
| 40,759
|
py
|
Python
|
pygbe/util/an_solution.py
|
ReneVelasquez18/PYGBE
|
ea0f875afc74e5c2b5f1fcea7b643a82acea171e
|
[
"BSD-3-Clause"
] | 36
|
2015-02-17T15:45:23.000Z
|
2019-10-28T15:14:23.000Z
|
pygbe/util/an_solution.py
|
ReneVelasquez18/PYGBE
|
ea0f875afc74e5c2b5f1fcea7b643a82acea171e
|
[
"BSD-3-Clause"
] | 48
|
2016-02-04T22:50:36.000Z
|
2019-06-25T17:01:06.000Z
|
pygbe/util/an_solution.py
|
ReneVelasquez18/PYGBE
|
ea0f875afc74e5c2b5f1fcea7b643a82acea171e
|
[
"BSD-3-Clause"
] | 26
|
2015-05-15T22:14:50.000Z
|
2019-02-07T19:00:47.000Z
|
"""
It contains the functions to compute the cases that presents an analytical
solutions.
All functions output the analytical solution in kcal/mol
"""
import numpy
from numpy import pi
from scipy import special, linalg
from scipy.misc import factorial
from math import gamma
def an_spherical(q, xq, E_1, E_2, E_0, R, N):
"""
It computes the analytical solution of the potential of a sphere with
Nq charges inside.
Took from Kirkwood (1934).
Arguments
----------
q : array, charges.
xq : array, positions of the charges.
E_1: float, dielectric constant inside the sphere.
E_2: float, dielectric constant outside the sphere.
E_0: float, dielectric constant of vacuum.
R : float, radius of the sphere.
N : int, number of terms desired in the spherical harmonic expansion.
Returns
--------
PHI: array, reaction potential.
"""
PHI = numpy.zeros(len(q))
for K in range(len(q)):
rho = numpy.sqrt(numpy.sum(xq[K]**2))
zenit = numpy.arccos(xq[K, 2] / rho)
azim = numpy.arctan2(xq[K, 1], xq[K, 0])
phi = 0. + 0. * 1j
for n in range(N):
for m in range(-n, n + 1):
sph1 = special.sph_harm(m, n, zenit, azim)
cons1 = rho**n / (E_1 * E_0 * R**(2 * n + 1)) * (E_1 - E_2) * (
n + 1) / (E_1 * n + E_2 * (n + 1))
cons2 = 4 * pi / (2 * n + 1)
for k in range(len(q)):
rho_k = numpy.sqrt(numpy.sum(xq[k]**2))
zenit_k = numpy.arccos(xq[k, 2] / rho_k)
azim_k = numpy.arctan2(xq[k, 1], xq[k, 0])
sph2 = numpy.conj(special.sph_harm(m, n, zenit_k, azim_k))
phi += cons1 * cons2 * q[K] * rho_k**n * sph1 * sph2
PHI[K] = numpy.real(phi) / (4 * pi)
return PHI
def get_K(x, n):
    """
    Evaluate the Kirkwood polynomial K_n(x) of Equation 4 in
    Kirkwood (1934), needed by the analytical solvation-energy solutions.

    Arguments
    ----------
    x: float, evaluation point of K.
    n: int, number of terms desired in the expansion.

    Returns
    --------
    K: float, polinomials K.
    """
    # Hoist the s-independent factorials out of the sum.
    fact_n = factorial(n)
    fact_2n = factorial(2 * n)
    return sum(
        2**s * fact_n * factorial(2 * n - s) /
        (factorial(s) * fact_2n * factorial(n - s)) * x**s
        for s in range(n + 1))
def an_P(q, xq, E_1, E_2, R, kappa, a, N):
    """
    It computes the solvation energy according to Kirkwood-1934.

    Arguments
    ----------
    q    : array, charges.
    xq   : array, positions of the charges.
    E_1  : float, dielectric constant inside the sphere.
    E_2  : float, dielectric constant outside the sphere.
    R    : float, radius of the sphere.
    kappa: float, reciprocal of Debye length.
    a    : float, radius of the Stern Layer.
    N    : int, number of terms desired in the polinomial expansion.

    Returns
    --------
    E_P  : float, solvation energy (converted to kcal/mol at the end).
    """
    # Physical constants (SI units).
    qe = 1.60217646e-19    # elementary charge [C]
    Na = 6.0221415e23      # Avogadro's number [1/mol]
    E_0 = 8.854187818e-12  # vacuum permittivity [F/m]
    cal2J = 4.184          # calorie-to-Joule conversion
    PHI = numpy.zeros(len(q))  # reaction potential at each charge location
    for K in range(len(q)):
        # Spherical coordinates of the target (evaluation) charge K.
        rho = numpy.sqrt(numpy.sum(xq[K]**2))
        zenit = numpy.arccos(xq[K, 2] / rho)
        azim = numpy.arctan2(xq[K, 1], xq[K, 0])
        phi = 0. + 0. * 1j  # complex accumulator; imaginary parts cancel
        for n in range(N):
            for m in range(-n, n + 1):
                # Associated Legendre function at the target colatitude.
                P1 = special.lpmv(numpy.abs(m), n, numpy.cos(zenit))
                # Multipole coefficient E_nm: sum over the source charges k.
                Enm = 0.
                for k in range(len(q)):
                    rho_k = numpy.sqrt(numpy.sum(xq[k]**2))
                    zenit_k = numpy.arccos(xq[k, 2] / rho_k)
                    azim_k = numpy.arctan2(xq[k, 1], xq[k, 0])
                    P2 = special.lpmv(numpy.abs(m), n, numpy.cos(zenit_k))
                    Enm += q[k] * rho_k**n * factorial(n - numpy.abs(
                        m)) / factorial(n + numpy.abs(m)) * P2 * numpy.exp(
                            -1j * m * azim_k)
                # C1, C2: ionic-screening / Stern-layer corrections built
                # from the Kirkwood polynomials K_n evaluated at kappa*a.
                C2 = (kappa * a)**2 * get_K(kappa * a, n - 1) / (
                    get_K(kappa * a, n + 1) + n * (E_2 - E_1) / (
                        (n + 1) * E_2 + n * E_1) * (R / a)**(2 * n + 1) *
                    (kappa * a)**2 * get_K(kappa * a, n - 1) / ((2 * n - 1) *
                                                                (2 * n + 1)))
                C1 = Enm / (E_2 * E_0 * a**
                            (2 * n + 1)) * (2 * n + 1) / (2 * n - 1) * (E_2 / (
                                (n + 1) * E_2 + n * E_1))**2
                # Reaction-field coefficient B_nm; the monopole term
                # (n = m = 0) has its own closed form.
                if n == 0 and m == 0:
                    Bnm = Enm / (E_0 * R) * (
                        1 / E_2 - 1 / E_1) - Enm * kappa * a / (
                            E_0 * E_2 * a * (1 + kappa * a))
                else:
                    Bnm = 1. / (E_1 * E_0 * R**(2 * n + 1)) * (E_1 - E_2) * (
                        n + 1) / (E_1 * n + E_2 * (n + 1)) * Enm - C1 * C2
                phi += Bnm * rho**n * P1 * numpy.exp(1j * m * azim)
        PHI[K] = numpy.real(phi) / (4 * pi)
    # E_P = 0.5 * sum(q_i * phi_i), with unit conversion to kcal/mol
    # (no E_0 here: the permittivity is already inside the Bnm terms).
    C0 = qe**2 * Na * 1e-3 * 1e10 / (cal2J)
    E_P = 0.5 * C0 * numpy.sum(q * PHI)
    return E_P
def two_sphere(a, R, kappa, E_1, E_2, q):
    """
    It computes the analytical solution of a spherical surface and a spherical
    molecule with a center charge, both of radius R.
    Follows Cooper&Barba 2016

    Arguments
    ----------
    a    : float, center to center distance.
    R    : float, radius of surface and molecule.
    kappa: float, reciprocal of Debye length.
    E_1  : float, dielectric constant inside the sphere.
    E_2  : float, dielectric constant outside the sphere.
    q    : float, number of qe to be asigned to the charge.

    Returns
    --------
    Einter  : float, interaction energy.
    E1sphere: float, solvation energy of one sphere.
    E2sphere: float, solvation energy of two spheres together.

    Note:
    Einter should match (E2sphere - 2xE1sphere)

    NOTE(review): the code below evaluates the surface Bessel functions at
    kappa*a and the Coulomb term as q/(4*pi*E_1*a), while the two-center
    coupling coefficients B use kappa*R — i.e. the code appears to use `a`
    as the sphere radius and `R` as the center-to-center distance, the
    opposite of the parameter descriptions above. Confirm against callers.
    """
    N = 20  # Number of terms in expansion.
    # Physical constants (SI units).
    qe = 1.60217646e-19
    Na = 6.0221415e23
    E_0 = 8.854187818e-12
    cal2J = 4.184
    # Modified spherical Bessel functions k_n, i_n (and derivatives,
    # suffix "p"), built from the half-integer-order K and I functions.
    index2 = numpy.arange(N + 1, dtype=float) + 0.5
    index = index2[0:-1]
    K1 = special.kv(index2, kappa * a)
    K1p = index / (kappa * a) * K1[0:-1] - K1[1:]
    k1 = special.kv(index, kappa * a) * numpy.sqrt(pi / (2 * kappa * a))
    k1p = -numpy.sqrt(pi / 2) * 1 / (2 * (kappa * a)**(3 / 2.)) * special.kv(
        index, kappa * a) + numpy.sqrt(pi / (2 * kappa * a)) * K1p
    I1 = special.iv(index2, kappa * a)
    I1p = index / (kappa * a) * I1[0:-1] + I1[1:]
    i1 = special.iv(index, kappa * a) * numpy.sqrt(pi / (2 * kappa * a))
    i1p = -numpy.sqrt(pi / 2) * 1 / (2 * (kappa * a)**(3 / 2.)) * special.iv(
        index, kappa * a) + numpy.sqrt(pi / (2 * kappa * a)) * I1p
    # B[n, m]: two-center re-expansion coefficients coupling the spheres.
    B = numpy.zeros((N, N), dtype=float)
    for n in range(N):
        for m in range(N):
            for nu in range(N):
                if n >= nu and m >= nu:
                    g1 = gamma(n - nu + 0.5)
                    g2 = gamma(m - nu + 0.5)
                    g3 = gamma(nu + 0.5)
                    g4 = gamma(m + n - nu + 1.5)
                    f1 = factorial(n + m - nu)
                    f2 = factorial(n - nu)
                    f3 = factorial(m - nu)
                    f4 = factorial(nu)
                    Anm = g1 * g2 * g3 * f1 * (n + m - 2 * nu + 0.5) / (
                        pi * g4 * f2 * f3 * f4)
                    kB = special.kv(n + m - 2 * nu + 0.5, kappa *
                                    R) * numpy.sqrt(pi / (2 * kappa * R))
                    B[n, m] += Anm * kB
    # Linear system for the expansion coefficients a_coeff.
    M = numpy.zeros((N, N), float)
    E_hat = E_1 / E_2
    for i in range(N):
        for j in range(N):
            M[i, j] = (2 * i + 1) * B[i, j] * (
                kappa * i1p[i] - E_hat * i * i1[i] / a)
            if i == j:
                M[i, j] += kappa * k1p[i] - E_hat * i * k1[i] / a
    RHS = numpy.zeros(N)
    RHS[0] = -E_hat * q / (4 * pi * E_1 * a * a)  # central point charge BC
    a_coeff = linalg.solve(M, RHS)
    a0 = a_coeff[0]
    # a0_inf: isolated-sphere (infinite separation) monopole coefficient.
    a0_inf = -E_hat * q / (4 * pi * E_1 * a * a) * 1 / (kappa * k1p[0])
    # Surface potentials: coupled (phi_2) and isolated (phi_1) states.
    phi_2 = a0 * k1[0] + i1[0] * numpy.sum(a_coeff * B[:, 0]) - q / (4 * pi *
                                                                     E_1 * a)
    phi_1 = a0_inf * k1[0] - q / (4 * pi * E_1 * a)
    phi_inter = phi_2 - phi_1
    # Conversion factor to kcal/mol.
    CC0 = qe**2 * Na * 1e-3 * 1e10 / (cal2J * E_0)
    Einter = 0.5 * CC0 * q * phi_inter
    E1sphere = 0.5 * CC0 * q * phi_1
    E2sphere = 0.5 * CC0 * q * phi_2
    return Einter, E1sphere, E2sphere
def constant_potential_single_point(phi0, a, r, kappa):
    """
    Potential at a distance r from the center of a sphere of radius a held
    at constant surface potential phi0, immersed in water (solution of the
    linearized Poisson-Boltzmann problem: screened Coulomb decay).

    Arguments
    ----------
    phi0 : float, constant potential on the surface of the sphere.
    a    : float, radius of the sphere.
    r    : float, distance from the center of the sphere to the evaluation
                  point.
    kappa: float, reciprocal of Debye length.

    Returns
    --------
    phi  : float, potential.
    """
    screening = numpy.exp(kappa * (a - r))  # Debye screening factor
    return phi0 * (a / r) * screening
def constant_charge_single_point(sigma0, a, r, kappa, epsilon):
    """
    Potential at a distance r from the center of a sphere of radius a
    carrying constant surface charge sigma0, immersed in water (solution
    of the linearized Poisson-Boltzmann problem).

    Arguments
    ----------
    sigma0 : float, constant charge on the surface of the sphere.
    a      : float, radius of the sphere.
    r      : float, distance from the center of the sphere to the
                    evaluation point.
    kappa  : float, reciprocal of Debye length.
    epsilon: float, water dielectric constant.

    Returns
    --------
    phi    : float, potential.
    """
    # Surface slope from the constant-charge boundary condition,
    # sigma0 = -epsilon * dphi/dr at r = a.
    surface_slope = sigma0 / epsilon
    return surface_slope * a * a / (1 + kappa * a) * numpy.exp(
        kappa * (a - r)) / r
def constant_potential_single_charge(phi0, radius, kappa, epsilon):
    """
    Surface charge of a sphere held at constant potential phi0, immersed
    in water (linearized Poisson-Boltzmann).

    Arguments
    ----------
    phi0   : float, constant potential on the surface of the sphere.
    radius : float, radius of the sphere.
    kappa  : float, reciprocal of Debye length.
    epsilon: float, water dielectric constant.

    Returns
    --------
    sigma  : float, surface charge.
    """
    # sigma = -epsilon * dphi/dr at the surface, with
    # dphi/dr = -phi0 * (1 + kappa*R) / R for the screened-Coulomb solution.
    return epsilon * phi0 * ((1. + kappa * radius) / radius)
def constant_charge_single_potential(sigma0, radius, kappa, epsilon):
    """
    Surface potential of a sphere carrying constant surface charge sigma0,
    immersed in water (linearized Poisson-Boltzmann).

    Arguments
    ----------
    sigma0 : float, constant charge on the surface of the sphere.
    radius : float, radius of the sphere.
    kappa  : float, reciprocal of Debye length.
    epsilon: float, water dielectric constant.

    Returns
    --------
    phi    : float, potential.
    """
    # Invert the constant-charge boundary condition
    # sigma0 = -epsilon * dphi/dr for the screened-Coulomb solution.
    return sigma0 * radius / (epsilon * (1. + kappa * radius))
def constant_potential_twosphere(phi01, phi02, r1, r2, R, kappa, epsilon):
    """
    It computes the solvation energy of two spheres at constant potential,
    immersed in water.

    Arguments
    ----------
    phi01  : float, constant potential on the surface of the sphere 1.
    phi02  : float, constant potential on the surface of the sphere 2.
    r1     : float, radius of sphere 1.
    r2     : float, radius of sphere 2.
    R      : float, distance center to center.
    kappa  : float, reciprocal of Debye length.
    epsilon: float, water dielectric constant.

    Returns
    --------
    E_solv : float, solvation energy.

    Side effects: prints the intermediate (dimensionless) energies U1, U2
    and their sum to stdout.
    """
    # Physical constants (SI units).
    kT = 4.1419464e-21  # at 300K
    qe = 1.60217646e-19
    Na = 6.0221415e23
    E_0 = 8.854187818e-12
    cal2J = 4.184
    # Scale potentials by the thermal voltage kT/qe (made dimensionless).
    C0 = kT / qe
    phi01 /= C0
    phi02 /= C0
    # Order-0 modified spherical Bessel functions for each sphere and for
    # the center-to-center coupling term B00.
    k1 = special.kv(0.5, kappa * r1) * numpy.sqrt(pi / (2 * kappa * r1))
    k2 = special.kv(0.5, kappa * r2) * numpy.sqrt(pi / (2 * kappa * r2))
    B00 = special.kv(0.5, kappa * R) * numpy.sqrt(pi / (2 * kappa * R))
    # k1 = special.kv(0.5,kappa*r1)*numpy.sqrt(2/(pi*kappa*r1))
    # k2 = special.kv(0.5,kappa*r2)*numpy.sqrt(2/(pi*kappa*r2))
    # B00 = special.kv(0.5,kappa*R)*numpy.sqrt(2/(pi*kappa*R))
    i1 = special.iv(0.5, kappa * r1) * numpy.sqrt(pi / (2 * kappa * r1))
    i2 = special.iv(0.5, kappa * r2) * numpy.sqrt(pi / (2 * kappa * r2))
    # Monopole coefficients of the coupled two-sphere expansion.
    a0 = (phi02 * B00 * i1 - phi01 * k2) / (B00 * B00 * i2 * i1 - k1 * k2)
    b0 = (phi02 * k1 - phi01 * B00 * i2) / (k2 * k1 - B00 * B00 * i1 * i2)
    # Dimensionless energies of each sphere.
    U1 = 2 * pi * phi01 * (phi01 * numpy.exp(kappa * r1) * (kappa * r1) *
                           (kappa * r1) / numpy.sinh(kappa * r1) - pi * a0 /
                           (2 * i1))
    U2 = 2 * pi * phi02 * (phi02 * numpy.exp(kappa * r2) * (kappa * r2) *
                           (kappa * r2) / numpy.sinh(kappa * r2) - pi * b0 /
                           (2 * i2))
    print('U1: {}'.format(U1))
    print('U2: {}'.format(U2))
    print('E: {}'.format(U1 + U2))
    # Undo the thermal-voltage scaling, then convert to kcal/mol.
    C1 = C0 * C0 * epsilon / kappa
    u1 = U1 * C1
    u2 = U2 * C1
    CC0 = qe**2 * Na * 1e-3 * 1e10 / (cal2J * E_0)
    E_solv = CC0 * (u1 + u2)
    return E_solv
def constant_potential_twosphere_2(phi01, phi02, r1, r2, R, kappa, epsilon):
    """
    It computes the solvation energy of two spheres at constant potential,
    immersed in water, using the superposition (Derjaguin-type) closed-form
    expression in terms of the surface-to-surface gap h = R - r1 - r2.

    Arguments
    ----------
    phi01  : float, constant potential on the surface of the sphere 1.
    phi02  : float, constant potential on the surface of the sphere 2.
    r1     : float, radius of sphere 1.
    r2     : float, radius of sphere 2.
    R      : float, distance center to center.
    kappa  : float, reciprocal of Debye length.
    epsilon: float, water dielectric constant.

    Returns
    --------
    E_solv : float, solvation energy in kcal/mol.
    """
    # Physical constants (SI units). The unused kT constant from the
    # original implementation was removed.
    qe = 1.60217646e-19    # elementary charge [C]
    Na = 6.0221415e23      # Avogadro's number [1/mol]
    E_0 = 8.854187818e-12  # vacuum permittivity [F/m]
    cal2J = 4.184          # calorie-to-Joule conversion
    h = R - r1 - r2        # surface-to-surface separation
    # E_inter = r1*r2*epsilon/(4*R) * ( (phi01+phi02)**2 * log(1+numpy.exp(-kappa*h)) + (phi01-phi02)**2*log(1-numpy.exp(-kappa*h)) )
    # E_inter = epsilon*r1*phi01**2/2 * log(1+numpy.exp(-kappa*h))
    # BUGFIX: the original used a bare `log`, which is not imported
    # anywhere in this module (NameError at runtime); use numpy.log.
    E_solv = epsilon * r1 * r2 * (phi01**2 + phi02**2) / (4 * (r1 + r2)) * (
        (2 * phi01 * phi02) / (phi01**2 + phi02**2) * numpy.log(
            (1 + numpy.exp(-kappa * h)) /
            (1 - numpy.exp(-kappa * h))) + numpy.log(
                1 - numpy.exp(-2 * kappa * h)))
    # Convert to kcal/mol.
    CC0 = qe**2 * Na * 1e-3 * 1e10 / (cal2J * E_0)
    E_solv *= CC0
    return E_solv
def constant_potential_single_energy(phi0, r1, kappa, epsilon):
    """
    Total energy of an isolated sphere held at constant surface potential
    phi0, immersed in water (linearized Poisson-Boltzmann).

    Arguments
    ----------
    phi0   : float, constant potential on the surface of the sphere.
    r1     : float, radius of sphere.
    kappa  : float, reciprocal of Debye length.
    epsilon: float, water dielectric constant.

    Returns
    --------
    E : float, total energy.
    """
    # Physical constants (SI units) and the kcal/mol conversion factor.
    qe = 1.60217646e-19
    Na = 6.0221415e23
    E_0 = 8.854187818e-12
    cal2J = 4.184
    to_kcalmol = qe**2 * Na * 1e-3 * 1e10 / (cal2J * E_0)
    expansion_terms = 1  # one term suffices for an isolated sphere
    # Half-integer orders for the modified spherical Bessel function k_0
    # and its derivative (suffix "p").
    orders_ext = numpy.arange(expansion_terms + 1, dtype=float) + 0.5
    orders = orders_ext[0:-1]
    KV = special.kv(orders_ext, kappa * r1)
    KVp = orders / (kappa * r1) * KV[0:-1] - KV[1:]
    k_sph = special.kv(orders, kappa * r1) * numpy.sqrt(pi / (2 * kappa * r1))
    k_sph_deriv = -numpy.sqrt(pi / 2) * 1 / (
        2 * (kappa * r1)**(3 / 2.)) * special.kv(
            orders, kappa * r1) + numpy.sqrt(pi / (2 * kappa * r1)) * KVp
    # Isolated-sphere expansion coefficient and surface-flux term.
    coeff_inf = phi0 / k_sph[0]
    flux_inf = coeff_inf * k_sph_deriv[0]
    scale = 2 * pi * kappa * phi0 * r1 * r1 * epsilon
    return to_kcalmol * scale * flux_inf
def constant_charge_single_energy(sigma0, r1, kappa, epsilon):
    """
    Total energy of an isolated sphere carrying constant surface charge
    sigma0, immersed in water (linearized Poisson-Boltzmann).

    Arguments
    ----------
    sigma0 : float, constant charge on the surface of the sphere.
    r1     : float, radius of sphere.
    kappa  : float, reciprocal of Debye length.
    epsilon: float, water dielectric constant.

    Returns
    --------
    E : float, total energy.
    """
    # Physical constants (SI units) and the kcal/mol conversion factor.
    qe = 1.60217646e-19
    Na = 6.0221415e23
    E_0 = 8.854187818e-12
    cal2J = 4.184
    to_kcalmol = qe**2 * Na * 1e-3 * 1e10 / (cal2J * E_0)
    expansion_terms = 20  # Number of terms in expansion
    # Half-integer orders for the modified spherical Bessel functions k_n
    # and their derivatives (suffix "p").
    orders_ext = numpy.arange(expansion_terms + 1, dtype=float) + 0.5
    orders = orders_ext[0:-1]
    KV = special.kv(orders_ext, kappa * r1)
    KVp = orders / (kappa * r1) * KV[0:-1] - KV[1:]
    k_sph = special.kv(orders, kappa * r1) * numpy.sqrt(pi / (2 * kappa * r1))
    k_sph_deriv = -numpy.sqrt(pi / 2) * 1 / (
        2 * (kappa * r1)**(3 / 2.)) * special.kv(
            orders, kappa * r1) + numpy.sqrt(pi / (2 * kappa * r1)) * KVp
    # Isolated-sphere coefficient from the constant-charge boundary
    # condition, and the resulting surface potential term.
    coeff_inf = -sigma0 / (epsilon * kappa * k_sph_deriv[0])
    surface_pot = coeff_inf * k_sph[0]
    scale = 2 * pi * sigma0 * r1 * r1
    return to_kcalmol * scale * surface_pot
def constant_potential_twosphere_dissimilar(phi01, phi02, r1, r2, R, kappa,
                                            epsilon):
    """
    It computes the interaction energy for dissimilar spheres at constant
    potential, immersed in water.

    Arguments
    ----------
    phi01  : float, constant potential on the surface of the sphere 1.
    phi02  : float, constant potential on the surface of the sphere 2.
    r1     : float, radius of sphere 1.
    r2     : float, radius of sphere 2.
    R      : float, distance center to center.
    kappa  : float, reciprocal of Debye length.
    epsilon: float, water dielectric constant.

    Returns
    --------
    E_inter: float, interaction energy.
    """
    N = 20  # Number of terms in expansion
    # Physical constants (SI units).
    qe = 1.60217646e-19
    Na = 6.0221415e23
    E_0 = 8.854187818e-12
    cal2J = 4.184
    # Modified spherical Bessel functions (k, i) and their derivatives
    # (suffix "p") for each sphere, from half-integer-order K and I.
    index2 = numpy.arange(N + 1, dtype=float) + 0.5
    index = index2[0:-1]
    K1 = special.kv(index2, kappa * r1)
    K1p = index / (kappa * r1) * K1[0:-1] - K1[1:]
    k1 = special.kv(index, kappa * r1) * numpy.sqrt(pi / (2 * kappa * r1))
    k1p = -numpy.sqrt(pi / 2) * 1 / (2 * (kappa * r1)**(3 / 2.)) * special.kv(
        index, kappa * r1) + numpy.sqrt(pi / (2 * kappa * r1)) * K1p
    K2 = special.kv(index2, kappa * r2)
    K2p = index / (kappa * r2) * K2[0:-1] - K2[1:]
    k2 = special.kv(index, kappa * r2) * numpy.sqrt(pi / (2 * kappa * r2))
    k2p = -numpy.sqrt(pi / 2) * 1 / (2 * (kappa * r2)**(3 / 2.)) * special.kv(
        index, kappa * r2) + numpy.sqrt(pi / (2 * kappa * r2)) * K2p
    I1 = special.iv(index2, kappa * r1)
    I1p = index / (kappa * r1) * I1[0:-1] + I1[1:]
    i1 = special.iv(index, kappa * r1) * numpy.sqrt(pi / (2 * kappa * r1))
    i1p = -numpy.sqrt(pi / 2) * 1 / (2 * (kappa * r1)**(3 / 2.)) * special.iv(
        index, kappa * r1) + numpy.sqrt(pi / (2 * kappa * r1)) * I1p
    I2 = special.iv(index2, kappa * r2)
    I2p = index / (kappa * r2) * I2[0:-1] + I2[1:]
    i2 = special.iv(index, kappa * r2) * numpy.sqrt(pi / (2 * kappa * r2))
    i2p = -numpy.sqrt(pi / 2) * 1 / (2 * (kappa * r2)**(3 / 2.)) * special.iv(
        index, kappa * r2) + numpy.sqrt(pi / (2 * kappa * r2)) * I2p
    # B[n, m]: two-center re-expansion coefficients evaluated at kappa*R.
    B = numpy.zeros((N, N), dtype=float)
    for n in range(N):
        for m in range(N):
            for nu in range(N):
                if n >= nu and m >= nu:
                    g1 = gamma(n - nu + 0.5)
                    g2 = gamma(m - nu + 0.5)
                    g3 = gamma(nu + 0.5)
                    g4 = gamma(m + n - nu + 1.5)
                    f1 = factorial(n + m - nu)
                    f2 = factorial(n - nu)
                    f3 = factorial(m - nu)
                    f4 = factorial(nu)
                    Anm = g1 * g2 * g3 * f1 * (n + m - 2 * nu + 0.5) / (
                        pi * g4 * f2 * f3 * f4)
                    kB = special.kv(n + m - 2 * nu + 0.5, kappa *
                                    R) * numpy.sqrt(pi / (2 * kappa * R))
                    B[n, m] += Anm * kB
    # 2N x 2N block system coupling both spheres' expansion coefficients;
    # the identity blocks come from each sphere's own boundary condition.
    M = numpy.zeros((2 * N, 2 * N), float)
    for j in range(N):
        for n in range(N):
            M[j, n + N] = (2 * j + 1) * B[j, n] * i1[j] / k2[n]
            M[j + N, n] = (2 * j + 1) * B[j, n] * i2[j] / k1[n]
            if n == j:
                M[j, n] = 1
                M[j + N, n + N] = 1
    RHS = numpy.zeros(2 * N)
    RHS[0] = phi01
    RHS[N] = phi02
    coeff = linalg.solve(M, RHS)
    a = coeff[0:N] / k1
    b = coeff[N:2 * N] / k2
    a0 = a[0]
    a0_inf = phi01 / k1[0]  # isolated-sphere (R -> infinity) coefficient
    b0 = b[0]
    b0_inf = phi02 / k2[0]
    # Surface-flux terms at finite separation ("_h") and at infinity.
    U1_inf = a0_inf * k1p[0]
    U1_h = a0 * k1p[0] + i1p[0] * numpy.sum(b * B[:, 0])
    U2_inf = b0_inf * k2p[0]
    U2_h = b0 * k2p[0] + i2p[0] * numpy.sum(a * B[:, 0])
    # Interaction energy = coupled state minus isolated state, in kcal/mol.
    C1 = 2 * pi * kappa * phi01 * r1 * r1 * epsilon
    C2 = 2 * pi * kappa * phi02 * r2 * r2 * epsilon
    C0 = qe**2 * Na * 1e-3 * 1e10 / (cal2J * E_0)
    E_inter = C0 * (C1 * (U1_h - U1_inf) + C2 * (U2_h - U2_inf))
    return E_inter
def constant_charge_twosphere_dissimilar(sigma01, sigma02, r1, r2, R, kappa,
                                         epsilon):
    """
    It computes the interaction energy between two dissimilar spheres at
    constant charge, immersed in water.

    Arguments
    ----------
    sigma01: float, constant charge on the surface of the sphere 1.
    sigma02: float, constant charge on the surface of the sphere 2.
    r1     : float, radius of sphere 1.
    r2     : float, radius of sphere 2.
    R      : float, distance center to center.
    kappa  : float, reciprocal of Debye length.
    epsilon: float, water dielectric constant.

    Returns
    --------
    E_inter: float, interaction energy.
    """
    N = 20  # Number of terms in expansion
    # Physical constants (SI units).
    qe = 1.60217646e-19
    Na = 6.0221415e23
    E_0 = 8.854187818e-12
    cal2J = 4.184
    # Modified spherical Bessel functions (k, i) and their derivatives
    # (suffix "p") for each sphere, from half-integer-order K and I.
    index2 = numpy.arange(N + 1, dtype=float) + 0.5
    index = index2[0:-1]
    K1 = special.kv(index2, kappa * r1)
    K1p = index / (kappa * r1) * K1[0:-1] - K1[1:]
    k1 = special.kv(index, kappa * r1) * numpy.sqrt(pi / (2 * kappa * r1))
    k1p = -numpy.sqrt(pi / 2) * 1 / (2 * (kappa * r1)**(3 / 2.)) * special.kv(
        index, kappa * r1) + numpy.sqrt(pi / (2 * kappa * r1)) * K1p
    K2 = special.kv(index2, kappa * r2)
    K2p = index / (kappa * r2) * K2[0:-1] - K2[1:]
    k2 = special.kv(index, kappa * r2) * numpy.sqrt(pi / (2 * kappa * r2))
    k2p = -numpy.sqrt(pi / 2) * 1 / (2 * (kappa * r2)**(3 / 2.)) * special.kv(
        index, kappa * r2) + numpy.sqrt(pi / (2 * kappa * r2)) * K2p
    I1 = special.iv(index2, kappa * r1)
    I1p = index / (kappa * r1) * I1[0:-1] + I1[1:]
    i1 = special.iv(index, kappa * r1) * numpy.sqrt(pi / (2 * kappa * r1))
    i1p = -numpy.sqrt(pi / 2) * 1 / (2 * (kappa * r1)**(3 / 2.)) * special.iv(
        index, kappa * r1) + numpy.sqrt(pi / (2 * kappa * r1)) * I1p
    I2 = special.iv(index2, kappa * r2)
    I2p = index / (kappa * r2) * I2[0:-1] + I2[1:]
    i2 = special.iv(index, kappa * r2) * numpy.sqrt(pi / (2 * kappa * r2))
    i2p = -numpy.sqrt(pi / 2) * 1 / (2 * (kappa * r2)**(3 / 2.)) * special.iv(
        index, kappa * r2) + numpy.sqrt(pi / (2 * kappa * r2)) * I2p
    # B[n, m]: two-center re-expansion coefficients evaluated at kappa*R.
    B = numpy.zeros((N, N), dtype=float)
    for n in range(N):
        for m in range(N):
            for nu in range(N):
                if n >= nu and m >= nu:
                    g1 = gamma(n - nu + 0.5)
                    g2 = gamma(m - nu + 0.5)
                    g3 = gamma(nu + 0.5)
                    g4 = gamma(m + n - nu + 1.5)
                    f1 = factorial(n + m - nu)
                    f2 = factorial(n - nu)
                    f3 = factorial(m - nu)
                    f4 = factorial(nu)
                    Anm = g1 * g2 * g3 * f1 * (n + m - 2 * nu + 0.5) / (
                        pi * g4 * f2 * f3 * f4)
                    kB = special.kv(n + m - 2 * nu + 0.5, kappa *
                                    R) * numpy.sqrt(pi / (2 * kappa * R))
                    B[n, m] += Anm * kB
    # 2N x 2N block system from the constant-charge boundary conditions.
    M = numpy.zeros((2 * N, 2 * N), float)
    for j in range(N):
        for n in range(N):
            M[j, n + N] = (2 * j + 1) * B[j, n] * r1 * i1p[j] / (r2 * k2p[n])
            M[j + N, n] = (2 * j + 1) * B[j, n] * r2 * i2p[j] / (r1 * k1p[n])
            if n == j:
                M[j, n] = 1
                M[j + N, n + N] = 1
    RHS = numpy.zeros(2 * N)
    RHS[0] = sigma01 * r1 / epsilon
    RHS[N] = sigma02 * r2 / epsilon
    coeff = linalg.solve(M, RHS)
    a = coeff[0:N] / (-r1 * kappa * k1p)
    b = coeff[N:2 * N] / (-r2 * kappa * k2p)
    a0 = a[0]
    # Isolated-sphere (R -> infinity) coefficients for each sphere.
    a0_inf = -sigma01 / (epsilon * kappa * k1p[0])
    b0 = b[0]
    b0_inf = -sigma02 / (epsilon * kappa * k2p[0])
    # Surface potentials at finite separation ("_h") and at infinity.
    U1_inf = a0_inf * k1[0]
    U1_h = a0 * k1[0] + i1[0] * numpy.sum(b * B[:, 0])
    U2_inf = b0_inf * k2[0]
    U2_h = b0 * k2[0] + i2[0] * numpy.sum(a * B[:, 0])
    # Interaction energy = coupled state minus isolated state, in kcal/mol.
    C1 = 2 * pi * sigma01 * r1 * r1
    C2 = 2 * pi * sigma02 * r2 * r2
    C0 = qe**2 * Na * 1e-3 * 1e10 / (cal2J * E_0)
    E_inter = C0 * (C1 * (U1_h - U1_inf) + C2 * (U2_h - U2_inf))
    return E_inter
def molecule_constant_potential(q, phi02, r1, r2, R, kappa, E_1, E_2):
    """
    It computes the interaction energy between a molecule (sphere with
    point-charge in the center) and a sphere at constant potential, immersed
    in water.

    Arguments
    ----------
    q      : float, number of qe to be asigned to the charge.
    phi02  : float, constant potential on the surface of the sphere 2.
    r1     : float, radius of sphere 1, i.e the molecule.
    r2     : float, radius of sphere 2.
    R      : float, distance center to center.
    kappa  : float, reciprocal of Debye length.
    E_1    : float, dielectric constant inside the sphere/molecule.
    E_2    : float, dielectric constant outside the sphere/molecule.

    Returns
    --------
    E_inter: float, interaction energy.
    """
    N = 20  # Number of terms in expansion
    # Physical constants (SI units).
    qe = 1.60217646e-19
    Na = 6.0221415e23
    E_0 = 8.854187818e-12
    cal2J = 4.184
    # Modified spherical Bessel functions (k, i) and their derivatives
    # (suffix "p") for each sphere, from half-integer-order K and I.
    index2 = numpy.arange(N + 1, dtype=float) + 0.5
    index = index2[0:-1]
    K1 = special.kv(index2, kappa * r1)
    K1p = index / (kappa * r1) * K1[0:-1] - K1[1:]
    k1 = special.kv(index, kappa * r1) * numpy.sqrt(pi / (2 * kappa * r1))
    k1p = -numpy.sqrt(pi / 2) * 1 / (2 * (kappa * r1)**(3 / 2.)) * special.kv(
        index, kappa * r1) + numpy.sqrt(pi / (2 * kappa * r1)) * K1p
    K2 = special.kv(index2, kappa * r2)
    K2p = index / (kappa * r2) * K2[0:-1] - K2[1:]
    k2 = special.kv(index, kappa * r2) * numpy.sqrt(pi / (2 * kappa * r2))
    k2p = -numpy.sqrt(pi / 2) * 1 / (2 * (kappa * r2)**(3 / 2.)) * special.kv(
        index, kappa * r2) + numpy.sqrt(pi / (2 * kappa * r2)) * K2p
    I1 = special.iv(index2, kappa * r1)
    I1p = index / (kappa * r1) * I1[0:-1] + I1[1:]
    i1 = special.iv(index, kappa * r1) * numpy.sqrt(pi / (2 * kappa * r1))
    i1p = -numpy.sqrt(pi / 2) * 1 / (2 * (kappa * r1)**(3 / 2.)) * special.iv(
        index, kappa * r1) + numpy.sqrt(pi / (2 * kappa * r1)) * I1p
    I2 = special.iv(index2, kappa * r2)
    I2p = index / (kappa * r2) * I2[0:-1] + I2[1:]
    i2 = special.iv(index, kappa * r2) * numpy.sqrt(pi / (2 * kappa * r2))
    i2p = -numpy.sqrt(pi / 2) * 1 / (2 * (kappa * r2)**(3 / 2.)) * special.iv(
        index, kappa * r2) + numpy.sqrt(pi / (2 * kappa * r2)) * I2p
    # B[n, m]: two-center re-expansion coefficients evaluated at kappa*R.
    B = numpy.zeros((N, N), dtype=float)
    for n in range(N):
        for m in range(N):
            for nu in range(N):
                if n >= nu and m >= nu:
                    g1 = gamma(n - nu + 0.5)
                    g2 = gamma(m - nu + 0.5)
                    g3 = gamma(nu + 0.5)
                    g4 = gamma(m + n - nu + 1.5)
                    f1 = factorial(n + m - nu)
                    f2 = factorial(n - nu)
                    f3 = factorial(m - nu)
                    f4 = factorial(nu)
                    Anm = g1 * g2 * g3 * f1 * (n + m - 2 * nu + 0.5) / (
                        pi * g4 * f2 * f3 * f4)
                    kB = special.kv(n + m - 2 * nu + 0.5, kappa *
                                    R) * numpy.sqrt(pi / (2 * kappa * R))
                    B[n, m] += Anm * kB
    E_hat = E_1 / E_2  # dielectric contrast of the molecule
    # 2N x 2N block system: molecule rows use the dielectric-interface
    # condition, sphere-2 rows the constant-potential condition.
    M = numpy.zeros((2 * N, 2 * N), float)
    for j in range(N):
        for n in range(N):
            M[j, n + N] = (2 * j + 1) * B[j, n] * (
                kappa * i1p[j] / k2[n] - E_hat * j / r1 * i1[j] / k2[n])
            M[j + N, n] = (2 * j + 1) * B[j, n] * i2[j] * 1 / (
                kappa * k1p[n] - E_hat * n / r1 * k1[n])
            if n == j:
                M[j, n] = 1
                M[j + N, n + N] = 1
    RHS = numpy.zeros(2 * N)
    RHS[0] = -E_hat * q / (4 * pi * E_1 * r1 * r1)  # central point charge
    RHS[N] = phi02
    coeff = linalg.solve(M, RHS)
    a = coeff[0:N] / (kappa * k1p - E_hat * numpy.arange(N) / r1 * k1)
    b = coeff[N:2 * N] / k2
    a0 = a[0]
    # Isolated (R -> infinity) coefficients.
    a0_inf = -E_hat * q / (4 * pi * E_1 * r1 * r1) * 1 / (kappa * k1p[0])
    b0 = b[0]
    b0_inf = phi02 / k2[0]
    # Reaction potential at the charge: coupled ("_h") vs isolated ("_inf").
    phi_inf = a0_inf * k1[0] - q / (4 * pi * E_1 * r1)
    phi_h = a0 * k1[0] + i1[0] * numpy.sum(b * B[:, 0]) - q / (4 * pi * E_1 *
                                                               r1)
    phi_inter = phi_h - phi_inf
    # Surface-flux terms on sphere 2.
    U_inf = b0_inf * k2p[0]
    U_h = b0 * k2p[0] + i2p[0] * numpy.sum(a * B[:, 0])
    U_inter = U_h - U_inf
    # Combine charge and surface contributions; convert to kcal/mol.
    C0 = qe**2 * Na * 1e-3 * 1e10 / (cal2J * E_0)
    C1 = q * 0.5
    C2 = 2 * pi * kappa * phi02 * r2 * r2 * E_2
    E_inter = C0 * (C1 * phi_inter + C2 * U_inter)
    return E_inter
def molecule_constant_charge(q, sigma02, r1, r2, R, kappa, E_1, E_2):
    """
    It computes the interaction energy between a molecule (sphere with
    point-charge in the center) and a sphere at constant charge, immersed
    in water.

    Arguments
    ----------
    q      : float, number of qe to be asigned to the charge.
    sigma02: float, constant charge on the surface of the sphere 2.
    r1     : float, radius of sphere 1, i.e the molecule.
    r2     : float, radius of sphere 2.
    R      : float, distance center to center.
    kappa  : float, reciprocal of Debye length.
    E_1    : float, dielectric constant inside the sphere/molecule.
    E_2    : float, dielectric constant outside the sphere/molecule.

    Returns
    --------
    E_inter: float, interaction energy.
    """
    N = 20  # Number of terms in expansion
    # Physical constants (SI units).
    qe = 1.60217646e-19
    Na = 6.0221415e23
    E_0 = 8.854187818e-12
    cal2J = 4.184
    # Modified spherical Bessel functions (k, i) and their derivatives
    # (suffix "p") for each sphere, from half-integer-order K and I.
    index2 = numpy.arange(N + 1, dtype=float) + 0.5
    index = index2[0:-1]
    K1 = special.kv(index2, kappa * r1)
    K1p = index / (kappa * r1) * K1[0:-1] - K1[1:]
    k1 = special.kv(index, kappa * r1) * numpy.sqrt(pi / (2 * kappa * r1))
    k1p = -numpy.sqrt(pi / 2) * 1 / (2 * (kappa * r1)**(3 / 2.)) * special.kv(
        index, kappa * r1) + numpy.sqrt(pi / (2 * kappa * r1)) * K1p
    K2 = special.kv(index2, kappa * r2)
    K2p = index / (kappa * r2) * K2[0:-1] - K2[1:]
    k2 = special.kv(index, kappa * r2) * numpy.sqrt(pi / (2 * kappa * r2))
    k2p = -numpy.sqrt(pi / 2) * 1 / (2 * (kappa * r2)**(3 / 2.)) * special.kv(
        index, kappa * r2) + numpy.sqrt(pi / (2 * kappa * r2)) * K2p
    I1 = special.iv(index2, kappa * r1)
    I1p = index / (kappa * r1) * I1[0:-1] + I1[1:]
    i1 = special.iv(index, kappa * r1) * numpy.sqrt(pi / (2 * kappa * r1))
    i1p = -numpy.sqrt(pi / 2) * 1 / (2 * (kappa * r1)**(3 / 2.)) * special.iv(
        index, kappa * r1) + numpy.sqrt(pi / (2 * kappa * r1)) * I1p
    I2 = special.iv(index2, kappa * r2)
    I2p = index / (kappa * r2) * I2[0:-1] + I2[1:]
    i2 = special.iv(index, kappa * r2) * numpy.sqrt(pi / (2 * kappa * r2))
    i2p = -numpy.sqrt(pi / 2) * 1 / (2 * (kappa * r2)**(3 / 2.)) * special.iv(
        index, kappa * r2) + numpy.sqrt(pi / (2 * kappa * r2)) * I2p
    # B[n, m]: two-center re-expansion coefficients evaluated at kappa*R.
    B = numpy.zeros((N, N), dtype=float)
    for n in range(N):
        for m in range(N):
            for nu in range(N):
                if n >= nu and m >= nu:
                    g1 = gamma(n - nu + 0.5)
                    g2 = gamma(m - nu + 0.5)
                    g3 = gamma(nu + 0.5)
                    g4 = gamma(m + n - nu + 1.5)
                    f1 = factorial(n + m - nu)
                    f2 = factorial(n - nu)
                    f3 = factorial(m - nu)
                    f4 = factorial(nu)
                    Anm = g1 * g2 * g3 * f1 * (n + m - 2 * nu + 0.5) / (
                        pi * g4 * f2 * f3 * f4)
                    kB = special.kv(n + m - 2 * nu + 0.5, kappa *
                                    R) * numpy.sqrt(pi / (2 * kappa * R))
                    B[n, m] += Anm * kB
    E_hat = E_1 / E_2  # dielectric contrast of the molecule
    # 2N x 2N block system: molecule rows use the dielectric-interface
    # condition, sphere-2 rows the constant-charge condition.
    M = numpy.zeros((2 * N, 2 * N), float)
    for j in range(N):
        for n in range(N):
            M[j, n + N] = (2 * j + 1) * B[j, n] * (
                i1p[j] / k2p[n] - E_hat * j / r1 * i1[j] / (kappa * k2p[n]))
            M[j + N, n] = (2 * j + 1) * B[j, n] * i2p[j] * kappa * 1 / (
                kappa * k1p[n] - E_hat * n / r1 * k1[n])
            if n == j:
                M[j, n] = 1
                M[j + N, n + N] = 1
    RHS = numpy.zeros(2 * N)
    RHS[0] = -E_hat * q / (4 * pi * E_1 * r1 * r1)  # central point charge
    RHS[N] = -sigma02 / E_2
    coeff = linalg.solve(M, RHS)
    a = coeff[0:N] / (kappa * k1p - E_hat * numpy.arange(N) / r1 * k1)
    b = coeff[N:2 * N] / (kappa * k2p)
    a0 = a[0]
    # Isolated (R -> infinity) coefficients.
    a0_inf = -E_hat * q / (4 * pi * E_1 * r1 * r1) * 1 / (kappa * k1p[0])
    b0 = b[0]
    b0_inf = -sigma02 / (E_2 * kappa * k2p[0])
    # Reaction potential at the charge: coupled ("_h") vs isolated ("_inf").
    phi_inf = a0_inf * k1[0] - q / (4 * pi * E_1 * r1)
    phi_h = a0 * k1[0] + i1[0] * numpy.sum(b * B[:, 0]) - q / (4 * pi * E_1 *
                                                               r1)
    phi_inter = phi_h - phi_inf
    # Surface-potential terms on sphere 2.
    U_inf = b0_inf * k2[0]
    U_h = b0 * k2[0] + i2[0] * numpy.sum(a * B[:, 0])
    U_inter = U_h - U_inf
    # Combine charge and surface contributions; convert to kcal/mol.
    C0 = qe**2 * Na * 1e-3 * 1e10 / (cal2J * E_0)
    C1 = q * 0.5
    C2 = 2 * pi * sigma02 * r2 * r2
    E_inter = C0 * (C1 * phi_inter + C2 * U_inter)
    return E_inter
def constant_potential_twosphere_identical(phi01, phi02, r1, r2, R, kappa,
                                           epsilon):
    """
    It computes the interaction energy for two spheres at constants surface
    potential, according to Carnie&Chan-1993.

    Arguments
    ----------
    phi01  : float, constant potential on the surface of the sphere 1.
    phi02  : float, constant potential on the surface of the sphere 2.
    r1     : float, radius of sphere 1.
    r2     : float, radius of sphere 2.
    R      : float, distance center to center.
    kappa  : float, reciprocal of Debye length.
    epsilon: float, water dielectric constant.

    Note:
    Even though it admits phi01 and phi02, they should be identical; and
    the same is applicable to r1 and r2.  (Only phi01 and r1 actually enter
    the solve and the final energy below.)

    Returns
    --------
    E_inter: float, interaction energy.
    """
    # From Carnie+Chan 1993
    N = 20  # Number of terms in expansion
    # Physical constants (SI units).
    qe = 1.60217646e-19
    Na = 6.0221415e23
    E_0 = 8.854187818e-12
    cal2J = 4.184
    # Modified spherical Bessel functions for each sphere.
    index = numpy.arange(N, dtype=float) + 0.5
    k1 = special.kv(index, kappa * r1) * numpy.sqrt(pi / (2 * kappa * r1))
    k2 = special.kv(index, kappa * r2) * numpy.sqrt(pi / (2 * kappa * r2))
    i1 = special.iv(index, kappa * r1) * numpy.sqrt(pi / (2 * kappa * r1))
    i2 = special.iv(index, kappa * r2) * numpy.sqrt(pi / (2 * kappa * r2))
    # B[n, m]: two-center re-expansion coefficients evaluated at kappa*R.
    B = numpy.zeros((N, N), dtype=float)
    for n in range(N):
        for m in range(N):
            for nu in range(N):
                if n >= nu and m >= nu:
                    g1 = gamma(n - nu + 0.5)
                    g2 = gamma(m - nu + 0.5)
                    g3 = gamma(nu + 0.5)
                    g4 = gamma(m + n - nu + 1.5)
                    f1 = factorial(n + m - nu)
                    f2 = factorial(n - nu)
                    f3 = factorial(m - nu)
                    f4 = factorial(nu)
                    Anm = g1 * g2 * g3 * f1 * (n + m - 2 * nu + 0.5) / (
                        pi * g4 * f2 * f3 * f4)
                    kB = special.kv(n + m - 2 * nu + 0.5, kappa *
                                    R) * numpy.sqrt(pi / (2 * kappa * R))
                    B[n, m] += Anm * kB
    # Symmetric (identical spheres) linear system for the coefficients.
    M = numpy.zeros((N, N), float)
    for i in range(N):
        for j in range(N):
            M[i, j] = (2 * i + 1) * B[i, j] * i1[i]
            if i == j:
                M[i, j] += k1[i]
    RHS = numpy.zeros(N)
    RHS[0] = phi01
    a = linalg.solve(M, RHS)
    a0 = a[0]
    # Dimensionless interaction term (Carnie & Chan closed form).
    U = 4 * pi * (-pi / 2 * a0 / phi01 * 1 / numpy.sinh(kappa * r1) + kappa *
                  r1 + kappa * r1 / numpy.tanh(kappa * r1))
    # Convert to kcal/mol.
    C0 = qe**2 * Na * 1e-3 * 1e10 / (cal2J * E_0)
    C1 = r1 * epsilon * phi01 * phi01
    E_inter = U * C1 * C0
    return E_inter
def constant_charge_twosphere_identical(sigma, a, R, kappa, epsilon):
    """
    It computes the interaction energy for two spheres at constant surface
    charge, according to Carnie&Chan-1993.

    Arguments
    ----------
    sigma  : float, constant charge on the surface of the spheres.
    a      : float, radius of spheres.
    R      : float, distance center to center.
    kappa  : float, reciprocal of Debye length.
    epsilon: float, water dielectric constant.

    Returns
    --------
    E_inter: float, interaction energy.
    """
    # From Carnie+Chan 1993
    N = 10  # Number of terms in expansion
    E_p = 0  # Permitivitty inside sphere
    # Physical constants (SI units).
    qe = 1.60217646e-19
    Na = 6.0221415e23
    E_0 = 8.854187818e-12
    cal2J = 4.184
    # Modified spherical Bessel functions k_n, i_n and derivatives
    # (suffix "p"), from half-integer-order K and I.
    index2 = numpy.arange(N + 1, dtype=float) + 0.5
    index = index2[0:-1]
    K1 = special.kv(index2, kappa * a)
    K1p = index / (kappa * a) * K1[0:-1] - K1[1:]
    k1 = special.kv(index, kappa * a) * numpy.sqrt(pi / (2 * kappa * a))
    k1p = -numpy.sqrt(pi / 2) * 1 / (2 * (kappa * a)**(3 / 2.)) * special.kv(
        index, kappa * a) + numpy.sqrt(pi / (2 * kappa * a)) * K1p
    I1 = special.iv(index2, kappa * a)
    I1p = index / (kappa * a) * I1[0:-1] + I1[1:]
    i1 = special.iv(index, kappa * a) * numpy.sqrt(pi / (2 * kappa * a))
    i1p = -numpy.sqrt(pi / 2) * 1 / (2 * (kappa * a)**(3 / 2.)) * special.iv(
        index, kappa * a) + numpy.sqrt(pi / (2 * kappa * a)) * I1p
    # B[n, m]: two-center re-expansion coefficients evaluated at kappa*R.
    B = numpy.zeros((N, N), dtype=float)
    for n in range(N):
        for m in range(N):
            for nu in range(N):
                if n >= nu and m >= nu:
                    g1 = gamma(n - nu + 0.5)
                    g2 = gamma(m - nu + 0.5)
                    g3 = gamma(nu + 0.5)
                    g4 = gamma(m + n - nu + 1.5)
                    f1 = factorial(n + m - nu)
                    f2 = factorial(n - nu)
                    f3 = factorial(m - nu)
                    f4 = factorial(nu)
                    Anm = g1 * g2 * g3 * f1 * (n + m - 2 * nu + 0.5) / (
                        pi * g4 * f2 * f3 * f4)
                    kB = special.kv(n + m - 2 * nu + 0.5, kappa *
                                    R) * numpy.sqrt(pi / (2 * kappa * R))
                    B[n, m] += Anm * kB
    # Linear system from the constant-charge boundary condition.
    M = numpy.zeros((N, N), float)
    for i in range(N):
        for j in range(N):
            M[i, j] = (2 * i + 1) * B[i, j] * (
                E_p / epsilon * i * i1[i] - a * kappa * i1p[i])
            if i == j:
                M[i, j] += (E_p / epsilon * i * k1[i] - a * kappa * k1p[i])
    RHS = numpy.zeros(N)
    RHS[0] = a * sigma / epsilon
    a_coeff = linalg.solve(M, RHS)
    a0 = a_coeff[0]
    C0 = a * sigma / epsilon
    CC0 = qe**2 * Na * 1e-3 * 1e10 / (cal2J * E_0)  # J -> kcal/mol factor
    # BUGFIX: the original wrote `CC0(...)`, calling the float CC0 as a
    # function (TypeError at runtime); the conversion factor multiplies
    # the bracketed Carnie & Chan expression.
    E_inter = 4 * pi * a * epsilon * C0 * C0 * CC0 * (pi * a0 / (2 * C0 * (
        kappa * a * numpy.cosh(kappa * a) - numpy.sinh(kappa * a))) - 1 / (
            1 + kappa * a) - 1 / (kappa * a * 1 / numpy.tanh(kappa * a) - 1))
    return E_inter
def Cext_analytical(radius, wavelength, diel_out, diel_in):
    """Return the analytical extinction cross section of a sphere.

    Implements the dipole (quasi-static) limit of Mie theory, valid when
    the nanoparticle is a sphere small compared to the wavelength.

    Arguments
    ----------
    radius    : float, radius of the sphere in [nm].
    wavelength: float/array of floats, wavelength of the incident
                electric field in [nm].
    diel_out  : complex/array of complex, dielectric constant of the
                medium outside the surface.
    diel_in   : complex/array of complex, dielectric constant inside
                the surface.

    Returns
    --------
    Cext_an   : float/array of floats, extinction cross section.
    """
    # Wavenumber in the surrounding medium.
    k = 2 * numpy.pi * numpy.sqrt(diel_out) / wavelength
    # Clausius-Mossotti-style polarizability factor.
    ratio = diel_in / diel_out
    cm_factor = (ratio - 1) / (ratio + 2)
    Cext_an = 4 * numpy.pi * radius**3 / k.real * (k**2 * cm_factor).imag
    return Cext_an
| 33.85299
| 136
| 0.492799
| 6,050
| 40,759
| 3.270248
| 0.046942
| 0.04094
| 0.04559
| 0.049735
| 0.823149
| 0.800607
| 0.777761
| 0.752085
| 0.736871
| 0.715542
| 0
| 0.091962
| 0.35597
| 40,759
| 1,203
| 137
| 33.881131
| 0.661752
| 0.247749
| 0
| 0.734472
| 0
| 0
| 0.000578
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029503
| false
| 0
| 0.007764
| 0
| 0.06677
| 0.004658
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4971ece59a38785c935224d9014bca63d714eb89
| 137
|
py
|
Python
|
build.py
|
DonaldMcRonald/SketchRecognitionWithTensorFlow
|
f712a8d78bce8af88365045116b27d56ec71dbec
|
[
"MIT"
] | 2
|
2016-05-12T18:30:39.000Z
|
2016-05-17T14:03:38.000Z
|
build.py
|
DonaldMcRonald/SketchRecognitionWithTensorFlow
|
f712a8d78bce8af88365045116b27d56ec71dbec
|
[
"MIT"
] | 4
|
2016-05-14T22:53:49.000Z
|
2016-05-14T23:40:40.000Z
|
build.py
|
DonaldMcRonald/SketchRecognitionWithTensorFlow
|
f712a8d78bce8af88365045116b27d56ec71dbec
|
[
"MIT"
] | null | null | null |
from pybuilder.core import task
from pybuilder.core import init
@task
def say_hello(logger):
    """PyBuilder task that logs a friendly greeting.

    Arguments
    ----------
    logger: the PyBuilder logger instance injected by the framework.
    """
    # PEP 8: no space between the function name and its parameter list
    # (original read `say_hello (logger)`).
    logger.info("Hello, PyBuilder")
| 19.571429
| 36
| 0.729927
| 19
| 137
| 5.210526
| 0.578947
| 0.262626
| 0.343434
| 0.464646
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.182482
| 137
| 6
| 37
| 22.833333
| 0.883929
| 0
| 0
| 0
| 0
| 0
| 0.122137
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
497fe6d1032e5044804a166628f6633b8ccdc39f
| 49
|
py
|
Python
|
desktop/core/ext-py/nose-1.3.7/unit_tests/test_isolation_plugin.py
|
kokosing/hue
|
2307f5379a35aae9be871e836432e6f45138b3d9
|
[
"Apache-2.0"
] | 5,079
|
2015-01-01T03:39:46.000Z
|
2022-03-31T07:38:22.000Z
|
desktop/core/ext-py/nose-1.3.7/unit_tests/test_isolation_plugin.py
|
zks888/hue
|
93a8c370713e70b216c428caa2f75185ef809deb
|
[
"Apache-2.0"
] | 1,623
|
2015-01-01T08:06:24.000Z
|
2022-03-30T19:48:52.000Z
|
desktop/core/ext-py/nose-1.3.7/unit_tests/test_isolation_plugin.py
|
zks888/hue
|
93a8c370713e70b216c428caa2f75185ef809deb
|
[
"Apache-2.0"
] | 2,033
|
2015-01-04T07:18:02.000Z
|
2022-03-28T19:55:47.000Z
|
def test_lint():
    # Smoke test: importing the isolation plugin module must not raise,
    # catching syntax errors and import-time failures in
    # nose.plugins.isolate without exercising any plugin behavior.
    import nose.plugins.isolate
| 16.333333
| 31
| 0.734694
| 7
| 49
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163265
| 49
| 2
| 32
| 24.5
| 0.853659
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0.5
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b8f1e16fc4be8bec89d23e599d2e7d6bd5272604
| 3,998
|
py
|
Python
|
tests/modules/teams/resources/test_getting_teams_info.py
|
IsmaelJS/test-github-actions
|
97223df261e9736c46875f590c9593dbac0d417b
|
[
"MIT"
] | 1,420
|
2015-11-20T01:25:14.000Z
|
2022-03-22T03:51:33.000Z
|
tests/modules/teams/resources/test_getting_teams_info.py
|
IsmaelJS/test-github-actions
|
97223df261e9736c46875f590c9593dbac0d417b
|
[
"MIT"
] | 151
|
2016-01-07T09:11:42.000Z
|
2020-11-17T08:37:07.000Z
|
tests/modules/teams/resources/test_getting_teams_info.py
|
IsmaelJS/test-github-actions
|
97223df261e9736c46875f590c9593dbac0d417b
|
[
"MIT"
] | 389
|
2015-11-23T01:14:31.000Z
|
2022-02-07T08:23:11.000Z
|
# encoding: utf-8
import pytest
@pytest.mark.parametrize('auth_scopes', (
    None,
    ('teams:write', ),
))
def test_getting_list_of_teams_by_unauthorized_user_must_fail(
        flask_app_client,
        regular_user,
        auth_scopes
):
    # Without the read scope the list endpoint must reject with a 401 JSON body.
    with flask_app_client.login(regular_user, auth_scopes=auth_scopes):
        resp = flask_app_client.get('/api/v1/teams/')
    assert resp.status_code == 401
    assert resp.content_type == 'application/json'
    assert {'status', 'message'} <= set(resp.json.keys())
@pytest.mark.parametrize('auth_scopes', (
    ('teams:read', ),
    ('teams:read', 'teams:write', ),
))
def test_getting_list_of_teams_by_authorized_user(
        flask_app_client,
        regular_user,
        team_for_regular_user,
        auth_scopes
):
    """A user holding 'teams:read' gets a one-element list with their team."""
    with flask_app_client.login(regular_user, auth_scopes=auth_scopes):
        response = flask_app_client.get('/api/v1/teams/')
    assert response.status_code == 200
    assert 'X-Total-Count' in response.headers
    assert int(response.headers['X-Total-Count']) == 1
    assert response.content_type == 'application/json'
    assert isinstance(response.json, list)
    assert set(response.json[0].keys()) >= {'id', 'title'}
    # X-Total-Count == 1 is asserted above, so the single entry must be the
    # regular user's team. The original guarded the title check with
    # `if response.json[0]['id'] == ...:`, which silently skipped it when the
    # id did not match; assert both directly instead.
    assert response.json[0]['id'] == team_for_regular_user.id
    assert response.json[0]['title'] == team_for_regular_user.title
@pytest.mark.parametrize('auth_scopes', (
    None,
    ('teams:write', ),
))
def test_getting_team_info_by_unauthorized_user_must_fail(
        flask_app_client,
        regular_user,
        team_for_regular_user,
        auth_scopes
):
    # The team detail endpoint requires 'teams:read'; anything less gets 401.
    with flask_app_client.login(regular_user, auth_scopes=auth_scopes):
        resp = flask_app_client.get('/api/v1/teams/%d' % team_for_regular_user.id)
    assert resp.status_code == 401
    assert resp.content_type == 'application/json'
    assert {'status', 'message'} <= set(resp.json.keys())
@pytest.mark.parametrize('auth_scopes', (
    ('teams:read', ),
    ('teams:read', 'teams:write', ),
))
def test_getting_team_info_by_authorized_user(
        flask_app_client,
        regular_user,
        team_for_regular_user,
        auth_scopes
):
    # With 'teams:read' the detail endpoint returns the team's own record.
    with flask_app_client.login(regular_user, auth_scopes=auth_scopes):
        resp = flask_app_client.get('/api/v1/teams/%d' % team_for_regular_user.id)
    team_json = resp.json
    assert resp.status_code == 200
    assert resp.content_type == 'application/json'
    assert {'id', 'title'} <= set(team_json.keys())
    assert team_json['id'] == team_for_regular_user.id
    assert team_json['title'] == team_for_regular_user.title
@pytest.mark.parametrize('auth_scopes', (
    None,
    ('teams:write', ),
))
def test_getting_list_of_team_members_by_unauthorized_user_must_fail(
        flask_app_client,
        regular_user,
        team_for_regular_user,
        auth_scopes
):
    # Listing members also requires 'teams:read'; expect a 401 JSON error.
    with flask_app_client.login(regular_user, auth_scopes=auth_scopes):
        resp = flask_app_client.get('/api/v1/teams/%d/members/' % team_for_regular_user.id)
    assert resp.status_code == 401
    assert resp.content_type == 'application/json'
    assert {'status', 'message'} <= set(resp.json.keys())
@pytest.mark.parametrize('auth_scopes', (
    ('teams:read', ),
    ('teams:read', 'teams:write', ),
))
def test_getting_list_of_team_members_by_authorized_user(
        flask_app_client,
        regular_user,
        team_for_regular_user,
        auth_scopes
):
    # With 'teams:read' the member list is returned; every entry belongs to
    # this team and the regular user appears among the members.
    with flask_app_client.login(regular_user, auth_scopes=auth_scopes):
        resp = flask_app_client.get('/api/v1/teams/%d/members/' % team_for_regular_user.id)
    assert resp.status_code == 200
    assert resp.content_type == 'application/json'
    assert isinstance(resp.json, list)
    assert {'team', 'user', 'is_leader'} <= set(resp.json[0].keys())
    team_ids = {member['team']['id'] for member in resp.json}
    assert team_ids == {team_for_regular_user.id}
    user_ids = {member['user']['id'] for member in resp.json}
    assert regular_user.id in user_ids
| 33.596639
| 95
| 0.694347
| 528
| 3,998
| 4.924242
| 0.125
| 0.114231
| 0.096923
| 0.096923
| 0.923462
| 0.923462
| 0.890769
| 0.890769
| 0.884615
| 0.85
| 0
| 0.00908
| 0.173587
| 3,998
| 118
| 96
| 33.881356
| 0.777845
| 0.003752
| 0
| 0.82
| 0
| 0
| 0.13062
| 0.01256
| 0
| 0
| 0
| 0
| 0.27
| 1
| 0.06
| false
| 0
| 0.01
| 0
| 0.07
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.